author    | Liming Gao <liming.gao@intel.com> | 2018-10-15 08:27:53 +0800
committer | Liming Gao <liming.gao@intel.com> | 2018-10-15 08:29:14 +0800
commit    | 1ccc4d895dd8d659d016efcd6ef8a48749aba1d0 (patch)
tree      | 0d5f58643cc72275887d3bb322813609906a9334 /BaseTools/Source/Python/AutoGen
parent    | 678f85131238622e576705117e299d81cff755c9 (diff)
download  | edk2-1ccc4d895dd8d659d016efcd6ef8a48749aba1d0.tar.gz
          | edk2-1ccc4d895dd8d659d016efcd6ef8a48749aba1d0.tar.bz2
          | edk2-1ccc4d895dd8d659d016efcd6ef8a48749aba1d0.zip
Revert BaseTools: PYTHON3 migration
This reverts commit 6693f359b3c213513c5096a06c6f67244a44dc52..
678f85131238622e576705117e299d81cff755c9.
Python3 migration is a fundamental change: it requires every developer
to install Python3. Before such a migration, broad communication and wide
verification must take place. At the moment most people are not aware of
this change and have not tried it, so the Python3 migration is reverted and
moved to the edk2-staging Python3 branch for evaluation by edk2 users.
Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Liming Gao <liming.gao@intel.com>
Diffstat (limited to 'BaseTools/Source/Python/AutoGen')
-rw-r--r-- | BaseTools/Source/Python/AutoGen/AutoGen.py                  | 107
-rw-r--r-- | BaseTools/Source/Python/AutoGen/BuildEngine.py              |   1
-rw-r--r-- | BaseTools/Source/Python/AutoGen/GenC.py                     |  26
-rw-r--r-- | BaseTools/Source/Python/AutoGen/GenMake.py                  |  43
-rw-r--r-- | BaseTools/Source/Python/AutoGen/GenPcdDb.py                 |  53
-rw-r--r-- | BaseTools/Source/Python/AutoGen/GenVar.py                   |  26
-rw-r--r-- | BaseTools/Source/Python/AutoGen/IdfClassObject.py           |   1
-rw-r--r-- | BaseTools/Source/Python/AutoGen/InfSectionParser.py         |   2
-rw-r--r-- | BaseTools/Source/Python/AutoGen/StrGather.py                |   7
-rw-r--r-- | BaseTools/Source/Python/AutoGen/UniClassObject.py           |  15
-rw-r--r-- | BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py  |  10
11 files changed, 148 insertions, 143 deletions
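
Editor's note: the hunks that follow repeat a small set of mechanical Python 3-to-Python 2 substitutions (super() call style, dict views wrapped in list(), floor division // vs /, bytes vs str buffers and 'rb' vs 'r' file modes, plus the print_function/absolute_import future imports). The sketch below is illustrative only and uses hypothetical names, not code from the patch; it runs under Python 3 and shows, in comments, the Python 2 spellings this revert restores.

    # Illustrative sketch only (hypothetical names, not part of the patch).
    # Runs under Python 3; comments show the Python 2 forms restored below.

    class Base(object):
        def name(self):
            return "Base"

    class Derived(Base):
        def name(self):
            # Python 3 (removed by the revert): super().name()
            # Python 2 (restored):              super(Derived, self).name()
            return "Derived/" + super().name()

    pcds = {"PcdTokenA": 1, "PcdTokenB": 2}

    # dict views: Python 3 wraps keys()/values()/items() in list() before
    # indexing or sorting; the Python 2 methods already return lists, so the
    # revert drops the list() wrappers.
    first_key = list(pcds.keys())[0]

    # Integer division: Python 3 needs the explicit floor operator '//';
    # the restored Python 2 code uses '/' on integers.
    array_size = 7 // 2

    # bytes vs. str: Python 3 opens hashed files with 'rb', compares buffers
    # against b"", and builds GUID byte strings with bytes([...]); the
    # restored Python 2 code uses 'r', "", and ''.join(chr(x) for x in ...).
    guid = bytes([0xE0, 0xC5, 0x13, 0x89])

    print(Derived().name(), first_key, array_size, guid.hex())
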
diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py
index c8309d5ed3..7c0bb47f77 100644
--- a/BaseTools/Source/Python/AutoGen/AutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -15,6 +15,8 @@ ## Import Modules
#
+from __future__ import print_function
+from __future__ import absolute_import
import Common.LongFilePathOs as os
import re
import os.path as path
@@ -198,11 +200,11 @@ class AutoGen(object): # if it exists, just return it directly
return cls.__ObjectCache[Key]
# it didnt exist. create it, cache it, then return it
- RetVal = cls.__ObjectCache[Key] = super().__new__(cls)
+ RetVal = cls.__ObjectCache[Key] = super(AutoGen, cls).__new__(cls)
return RetVal
def __init__ (self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
- super().__init__()
+ super(AutoGen, self).__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
## hash() operator
#
@@ -235,7 +237,7 @@ class WorkspaceAutoGen(AutoGen): # call super().__init__ then call the worker function with different parameter count
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
if not hasattr(self, "_Init"):
- super().__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
+ super(WorkspaceAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
self._Init = True
@@ -295,7 +297,7 @@ class WorkspaceAutoGen(AutoGen): SkippedArchList = set(self.ArchList).symmetric_difference(set(self.Platform.SupArchList))
EdkLogger.verbose("\nArch [%s] is ignored because the platform supports [%s] only!"
% (" ".join(SkippedArchList), " ".join(self.Platform.SupArchList)))
- self.ArchList = tuple(sorted(ArchList))
+ self.ArchList = tuple(ArchList)
# Validate build target
if self.BuildTarget not in self.Platform.BuildTargets:
@@ -518,7 +520,7 @@ class WorkspaceAutoGen(AutoGen): for BuildData in PGen.BuildDatabase._CACHE_.values():
if BuildData.Arch != Arch:
continue
- for key in list(BuildData.Pcds.keys()):
+ for key in BuildData.Pcds:
for SinglePcd in GlobalData.MixedPcd:
if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) == SinglePcd:
for item in GlobalData.MixedPcd[SinglePcd]:
@@ -616,17 +618,17 @@ class WorkspaceAutoGen(AutoGen): #
content = 'gCommandLineDefines: '
content += str(GlobalData.gCommandLineDefines)
- content += "\n"
+ content += os.linesep
content += 'BuildOptionPcd: '
content += str(GlobalData.BuildOptionPcd)
- content += "\n"
+ content += os.linesep
content += 'Active Platform: '
content += str(self.Platform)
- content += "\n"
+ content += os.linesep
if self.FdfFile:
content += 'Flash Image Definition: '
content += str(self.FdfFile)
- content += "\n"
+ content += os.linesep
SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)
#
@@ -636,7 +638,7 @@ class WorkspaceAutoGen(AutoGen): if Pa.PcdTokenNumber:
if Pa.DynamicPcdList:
for Pcd in Pa.DynamicPcdList:
- PcdTokenNumber += "\n"
+ PcdTokenNumber += os.linesep
PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))
PcdTokenNumber += ' : '
PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName])
@@ -661,7 +663,7 @@ class WorkspaceAutoGen(AutoGen): for files in AllWorkSpaceMetaFiles:
if files.endswith('.dec'):
continue
- f = open(files, 'rb')
+ f = open(files, 'r')
Content = f.read()
f.close()
m.update(Content)
@@ -677,7 +679,7 @@ class WorkspaceAutoGen(AutoGen): if not os.path.exists(self.BuildDir):
os.makedirs(self.BuildDir)
with open(os.path.join(self.BuildDir, 'AutoGen'), 'w+') as file:
- for f in sorted(AllWorkSpaceMetaFiles):
+ for f in AllWorkSpaceMetaFiles:
print(f, file=file)
return True
@@ -690,7 +692,7 @@ class WorkspaceAutoGen(AutoGen): HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')
m = hashlib.md5()
# Get .dec file's hash value
- f = open(Pkg.MetaFile.Path, 'rb')
+ f = open(Pkg.MetaFile.Path, 'r')
Content = f.read()
f.close()
m.update(Content)
@@ -700,7 +702,7 @@ class WorkspaceAutoGen(AutoGen): for Root, Dirs, Files in os.walk(str(inc)):
for File in sorted(Files):
File_Path = os.path.join(Root, File)
- f = open(File_Path, 'rb')
+ f = open(File_Path, 'r')
Content = f.read()
f.close()
m.update(Content)
@@ -864,7 +866,7 @@ class WorkspaceAutoGen(AutoGen): def _CheckAllPcdsTokenValueConflict(self):
for Pa in self.AutoGenObjectList:
for Package in Pa.PackageList:
- PcdList = list(Package.Pcds.values())
+ PcdList = Package.Pcds.values()
PcdList.sort(key=lambda x: int(x.TokenValue, 0))
Count = 0
while (Count < len(PcdList) - 1) :
@@ -910,7 +912,7 @@ class WorkspaceAutoGen(AutoGen): Count += SameTokenValuePcdListCount
Count += 1
- PcdList = list(Package.Pcds.values())
+ PcdList = Package.Pcds.values()
PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
Count = 0
while (Count < len(PcdList) - 1) :
@@ -973,7 +975,7 @@ class PlatformAutoGen(AutoGen): # call super().__init__ then call the worker function with different parameter count
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
if not hasattr(self, "_Init"):
- super().__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
+ super(PlatformAutoGen, self).__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
self._Init = True
#
@@ -1179,7 +1181,7 @@ class PlatformAutoGen(AutoGen): if os.path.exists(VpdMapFilePath):
OrgVpdFile.Read(VpdMapFilePath)
PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])
- NvStoreOffset = list(PcdItems.values())[0].strip() if PcdItems else '0'
+ NvStoreOffset = PcdItems.values()[0].strip() if PcdItems else '0'
else:
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
@@ -1229,7 +1231,7 @@ class PlatformAutoGen(AutoGen): FdfModuleList.append(os.path.normpath(InfName))
for M in self._MaList:
# F is the Module for which M is the module autogen
- for PcdFromModule in list(M.ModulePcdList) + list(M.LibraryPcdList):
+ for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:
# make sure that the "VOID*" kind of datum has MaxDatumSize set
if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:
NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, M.MetaFile))
@@ -1378,7 +1380,7 @@ class PlatformAutoGen(AutoGen): if (self.Workspace.ArchList[-1] == self.Arch):
for Pcd in self._DynamicPcdList:
# just pick the a value to determine whether is unicode string type
- Sku = list(Pcd.SkuInfoList.values())[0]
+ Sku = Pcd.SkuInfoList.values()[0]
Sku.VpdOffset = Sku.VpdOffset.strip()
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
@@ -1477,7 +1479,7 @@ class PlatformAutoGen(AutoGen): if not FoundFlag :
# just pick the a value to determine whether is unicode string type
SkuValueMap = {}
- SkuObjList = list(DscPcdEntry.SkuInfoList.items())
+ SkuObjList = DscPcdEntry.SkuInfoList.items()
DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)
if DefaultSku:
defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))
@@ -1503,7 +1505,7 @@ class PlatformAutoGen(AutoGen): DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]
# Only fix the value while no value provided in DSC file.
if not Sku.DefaultValue:
- DscPcdEntry.SkuInfoList[list(DscPcdEntry.SkuInfoList.keys())[0]].DefaultValue = DecPcdEntry.DefaultValue
+ DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue
if DscPcdEntry not in self._DynamicPcdList:
self._DynamicPcdList.append(DscPcdEntry)
@@ -1579,7 +1581,7 @@ class PlatformAutoGen(AutoGen): # Delete the DynamicPcdList At the last time enter into this function
for Pcd in self._DynamicPcdList:
# just pick the a value to determine whether is unicode string type
- Sku = list(Pcd.SkuInfoList.values())[0]
+ Sku = Pcd.SkuInfoList.values()[0]
Sku.VpdOffset = Sku.VpdOffset.strip()
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
@@ -1598,14 +1600,11 @@ class PlatformAutoGen(AutoGen): self._DynamicPcdList.extend(list(UnicodePcdArray))
self._DynamicPcdList.extend(list(HiiPcdArray))
self._DynamicPcdList.extend(list(OtherPcdArray))
- #python3.6 set is not ordered at all
- self._DynamicPcdList = sorted(self._DynamicPcdList, key=lambda x:(x.TokenSpaceGuidCName, x.TokenCName))
- self._NonDynamicPcdList = sorted(self._NonDynamicPcdList, key=lambda x: (x.TokenSpaceGuidCName, x.TokenCName))
allskuset = [(SkuName, Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName, Sku) in pcd.SkuInfoList.items()]
for pcd in self._DynamicPcdList:
if len(pcd.SkuInfoList) == 1:
for (SkuName, SkuId) in allskuset:
- if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:
+ if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0:
continue
pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])
pcd.SkuInfoList[SkuName].SkuId = SkuId
@@ -2147,7 +2146,7 @@ class PlatformAutoGen(AutoGen): Pcd.MaxDatumSize = str(len(Value.split(',')))
else:
Pcd.MaxDatumSize = str(len(Value) - 1)
- return list(Pcds.values())
+ return Pcds.values()
## Resolve library names to library modules
#
@@ -2251,7 +2250,7 @@ class PlatformAutoGen(AutoGen): # Use the highest priority value.
#
if (len(OverrideList) >= 2):
- KeyList = list(OverrideList.keys())
+ KeyList = OverrideList.keys()
for Index in range(len(KeyList)):
NowKey = KeyList[Index]
Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
@@ -2373,11 +2372,11 @@ class PlatformAutoGen(AutoGen): if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
BuildRuleOrder = Options[Tool][Attr]
- AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
- list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
- list(self.ToolDefinition.keys()))
+ AllTools = set(ModuleOptions.keys() + PlatformOptions.keys() +
+ PlatformModuleOptions.keys() + ModuleTypeOptions.keys() +
+ self.ToolDefinition.keys())
BuildOptions = defaultdict(lambda: defaultdict(str))
- for Tool in sorted(AllTools):
+ for Tool in AllTools:
for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
if Tool not in Options:
continue
@@ -2428,7 +2427,7 @@ class ModuleAutoGen(AutoGen): # call super().__init__ then call the worker function with different parameter count
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
if not hasattr(self, "_Init"):
- super().__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
+ super(ModuleAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
self._Init = True
@@ -2442,7 +2441,7 @@ class ModuleAutoGen(AutoGen): EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
% (MetaFile, Arch))
return None
- return super().__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
+ return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
## Initialize ModuleAutoGen
#
@@ -3159,12 +3158,12 @@ class ModuleAutoGen(AutoGen): @cached_property
def IntroTargetList(self):
self.Targets
- return sorted(self._IntroBuildTargetList, key=lambda x: str(x.Target))
+ return self._IntroBuildTargetList
@cached_property
def CodaTargetList(self):
self.Targets
- return sorted(self._FinalBuildTargetList, key=lambda x: str(x.Target))
+ return self._FinalBuildTargetList
@cached_property
def FileTypes(self):
@@ -3210,7 +3209,7 @@ class ModuleAutoGen(AutoGen): AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
RetVal[AutoFile] = str(StringH)
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
+ if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
RetVal[AutoFile] = UniStringBinBuffer.getvalue()
AutoFile.IsBinary = True
@@ -3221,7 +3220,7 @@ class ModuleAutoGen(AutoGen): AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
RetVal[AutoFile] = str(StringIdf)
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
+ if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
AutoFile.IsBinary = True
@@ -3439,7 +3438,7 @@ class ModuleAutoGen(AutoGen): return None
MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
- VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
+ VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrUniBaseName.values())
if not VfrUniOffsetList:
return None
@@ -3452,7 +3451,7 @@ class ModuleAutoGen(AutoGen): EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
# Use a instance of BytesIO to cache data
- fStringIO = BytesIO()
+ fStringIO = BytesIO('')
for Item in VfrUniOffsetList:
if (Item[0].find("Strings") != -1):
@@ -3462,7 +3461,8 @@ class ModuleAutoGen(AutoGen): # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
#
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
- fStringIO.write(bytes(UniGuid))
+ UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
+ fStringIO.write(''.join(UniGuid))
UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue)
else:
@@ -3472,7 +3472,8 @@ class ModuleAutoGen(AutoGen): # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
#
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
- fStringIO.write(bytes(VfrGuid))
+ VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
+ fStringIO.write(''.join(VfrGuid))
VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue)
#
@@ -3524,7 +3525,7 @@ class ModuleAutoGen(AutoGen): Packages = []
PcdCheckList = []
PcdTokenSpaceList = []
- for Pcd in list(self.ModulePcdList) + list(self.LibraryPcdList):
+ for Pcd in self.ModulePcdList + self.LibraryPcdList:
if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
PatchablePcds.append(Pcd)
PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
@@ -3636,10 +3637,6 @@ class ModuleAutoGen(AutoGen): AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
else:
AsBuiltInfDict['binary_item'].append('BIN|' + File)
- if not self.DepexGenerated:
- DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
- if os.path.exists(DepexFile):
- self.DepexGenerated = True
if self.DepexGenerated:
self.OutputFile.add(self.Name + '.depex')
if self.ModuleType in [SUP_MODULE_PEIM]:
@@ -3736,7 +3733,7 @@ class ModuleAutoGen(AutoGen): Padding = '0x00, '
if Unicode:
Padding = Padding * 2
- ArraySize = ArraySize // 2
+ ArraySize = ArraySize / 2
if ArraySize < (len(PcdValue) + 1):
if Pcd.MaxSizeUserSet:
EdkLogger.error("build", AUTOGEN_ERROR,
@@ -3896,7 +3893,7 @@ class ModuleAutoGen(AutoGen): if os.path.exists (self.TimeStampPath):
os.remove (self.TimeStampPath)
with open(self.TimeStampPath, 'w+') as file:
- for f in sorted(FileSet):
+ for f in FileSet:
print(f, file=file)
# Ignore generating makefile when it is a binary module
@@ -4024,29 +4021,29 @@ class ModuleAutoGen(AutoGen): GlobalData.gModuleHash[self.Arch] = {}
m = hashlib.md5()
# Add Platform level hash
- m.update(GlobalData.gPlatformHash.encode('utf-8'))
+ m.update(GlobalData.gPlatformHash)
# Add Package level hash
if self.DependentPackageList:
for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:
- m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName].encode('utf-8'))
+ m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName])
# Add Library hash
if self.LibraryAutoGenList:
for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
Lib.GenModuleHash()
- m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
+ m.update(GlobalData.gModuleHash[self.Arch][Lib.Name])
# Add Module self
- f = open(str(self.MetaFile), 'rb')
+ f = open(str(self.MetaFile), 'r')
Content = f.read()
f.close()
m.update(Content)
# Add Module's source files
if self.SourceFileList:
for File in sorted(self.SourceFileList, key=lambda x: str(x)):
- f = open(str(File), 'rb')
+ f = open(str(File), 'r')
Content = f.read()
f.close()
m.update(Content)
diff --git a/BaseTools/Source/Python/AutoGen/BuildEngine.py b/BaseTools/Source/Python/AutoGen/BuildEngine.py
index 86a3c09ad5..ac7a668755 100644
--- a/BaseTools/Source/Python/AutoGen/BuildEngine.py
+++ b/BaseTools/Source/Python/AutoGen/BuildEngine.py
@@ -14,6 +14,7 @@ ##
# Import Modules
#
+from __future__ import print_function
import Common.LongFilePathOs as os
import re
import copy
diff --git a/BaseTools/Source/Python/AutoGen/GenC.py b/BaseTools/Source/Python/AutoGen/GenC.py
index 4db5330e51..09626d0b96 100644
--- a/BaseTools/Source/Python/AutoGen/GenC.py
+++ b/BaseTools/Source/Python/AutoGen/GenC.py
@@ -13,6 +13,7 @@ ## Import Modules
#
+from __future__ import absolute_import
import string
import collections
import struct
@@ -936,7 +937,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd): if Info.IsLibrary:
PcdList = Info.LibraryPcdList
else:
- PcdList = list(Info.ModulePcdList) + list(Info.LibraryPcdList)
+ PcdList = Info.ModulePcdList + Info.LibraryPcdList
PcdExCNameTest = 0
for PcdModule in PcdList:
if PcdModule.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
@@ -970,7 +971,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd): AutoGenH.Append('#define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
elif Pcd.Type in PCD_DYNAMIC_TYPE_SET:
PcdCNameTest = 0
- for PcdModule in list(Info.LibraryPcdList) + list(Info.ModulePcdList):
+ for PcdModule in Info.LibraryPcdList + Info.ModulePcdList:
if PcdModule.Type in PCD_DYNAMIC_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
PcdCNameTest += 1
# get out early once we found > 1...
@@ -1011,10 +1012,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd): Value = Value[:-1]
ValueNumber = int (Value, 0)
except:
- try:
- ValueNumber = int(Value.lstrip('0'))
- except:
- EdkLogger.error("build", AUTOGEN_ERROR,
+ EdkLogger.error("build", AUTOGEN_ERROR,
"PCD value is not valid dec or hex number for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
ExtraData="[%s]" % str(Info))
if ValueNumber < 0:
@@ -1053,7 +1051,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd): else:
NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ', '
if Unicode:
- ArraySize = ArraySize // 2
+ ArraySize = ArraySize / 2
Value = NewValue + '0 }'
if ArraySize < ValueSize:
if Pcd.MaxSizeUserSet:
@@ -1063,7 +1061,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd): else:
ArraySize = Pcd.GetPcdSize()
if Unicode:
- ArraySize = ArraySize // 2
+ ArraySize = ArraySize / 2
Array = '[%d]' % ArraySize
#
# skip casting for fixed at build since it breaks ARM assembly.
@@ -1798,7 +1796,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer): TempBuffer += Buffer
elif File.Ext.upper() == '.JPG':
ImageType, = struct.unpack('4s', Buffer[6:10])
- if ImageType != b'JFIF':
+ if ImageType != 'JFIF':
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)
TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)
TempBuffer += pack('I', len(Buffer))
@@ -1898,7 +1896,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer): def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
ImageType, = struct.unpack('2s', Buffer[0:2])
- if ImageType!= b'BM': # BMP file type is 'BM'
+ if ImageType!= 'BM': # BMP file type is 'BM'
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)
BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])
BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')
@@ -1922,7 +1920,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent): else:
ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_1BIT)
ImageBuffer += pack('B', PaletteIndex)
- Width = (BmpHeader.biWidth + 7)//8
+ Width = (BmpHeader.biWidth + 7)/8
if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
elif BmpHeader.biBitCount == 4:
@@ -1931,7 +1929,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent): else:
ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_4BIT)
ImageBuffer += pack('B', PaletteIndex)
- Width = (BmpHeader.biWidth + 1)//2
+ Width = (BmpHeader.biWidth + 1)/2
if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
elif BmpHeader.biBitCount == 8:
@@ -1970,7 +1968,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent): for Index in range(0, len(PaletteBuffer)):
if Index % 4 == 3:
continue
- PaletteTemp += bytes([PaletteBuffer[Index]])
+ PaletteTemp += PaletteBuffer[Index]
PaletteBuffer = PaletteTemp[1:]
return ImageBuffer, PaletteBuffer
@@ -2068,7 +2066,7 @@ def CreateCode(Info, AutoGenC, AutoGenH, StringH, UniGenCFlag, UniGenBinBuffer, if Guid in Info.Module.GetGuidsUsedByPcd():
continue
GuidMacros.append('#define %s %s' % (Guid, Info.Module.Guids[Guid]))
- for Guid, Value in list(Info.Module.Protocols.items()) + list(Info.Module.Ppis.items()):
+ for Guid, Value in Info.Module.Protocols.items() + Info.Module.Ppis.items():
GuidMacros.append('#define %s %s' % (Guid, Value))
# supports FixedAtBuild and FeaturePcd usage in VFR file
if Info.VfrFileList and Info.ModulePcdList:
diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py
index 2f6fe06764..b4377eef17 100644
--- a/BaseTools/Source/Python/AutoGen/GenMake.py
+++ b/BaseTools/Source/Python/AutoGen/GenMake.py
@@ -13,6 +13,7 @@ ## Import Modules
#
+from __future__ import absolute_import
import Common.LongFilePathOs as os
import sys
import string
@@ -491,7 +492,7 @@ cleanlib: # EdkII modules always use "_ModuleEntryPoint" as entry point
ImageEntryPoint = "_ModuleEntryPoint"
- for k, v in MyAgo.Module.Defines.items():
+ for k, v in MyAgo.Module.Defines.iteritems():
if k not in MyAgo.Macros:
MyAgo.Macros[k] = v
@@ -503,7 +504,7 @@ cleanlib: MyAgo.Macros['IMAGE_ENTRY_POINT'] = ImageEntryPoint
PCI_COMPRESS_Flag = False
- for k, v in MyAgo.Module.Defines.items():
+ for k, v in MyAgo.Module.Defines.iteritems():
if 'PCI_COMPRESS' == k and 'TRUE' == v:
PCI_COMPRESS_Flag = True
@@ -654,7 +655,7 @@ cleanlib: "module_relative_directory" : MyAgo.SourceDir,
"module_dir" : mws.join (self.Macros["WORKSPACE"], MyAgo.SourceDir),
"package_relative_directory": package_rel_dir,
- "module_extra_defines" : ["%s = %s" % (k, v) for k, v in MyAgo.Module.Defines.items()],
+ "module_extra_defines" : ["%s = %s" % (k, v) for k, v in MyAgo.Module.Defines.iteritems()],
"architecture" : MyAgo.Arch,
"toolchain_tag" : MyAgo.ToolChain,
@@ -668,8 +669,8 @@ cleanlib: "separator" : Separator,
"module_tool_definitions" : ToolsDef,
- "shell_command_code" : list(self._SHELL_CMD_[self._FileType].keys()),
- "shell_command" : list(self._SHELL_CMD_[self._FileType].values()),
+ "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
+ "shell_command" : self._SHELL_CMD_[self._FileType].values(),
"module_entry_point" : ModuleEntryPoint,
"image_entry_point" : ImageEntryPoint,
@@ -917,7 +918,7 @@ cleanlib: #
# Extract common files list in the dependency files
#
- for File in sorted(DepSet, key=lambda x: str(x)):
+ for File in DepSet:
self.CommonFileDependency.append(self.PlaceMacro(File.Path, self.Macros))
for File in FileDependencyDict:
@@ -926,11 +927,11 @@ cleanlib: continue
NewDepSet = set(FileDependencyDict[File])
NewDepSet -= DepSet
- FileDependencyDict[File] = ["$(COMMON_DEPS)"] + sorted(NewDepSet, key=lambda x: str(x))
+ FileDependencyDict[File] = ["$(COMMON_DEPS)"] + list(NewDepSet)
# Convert target description object to target string in makefile
for Type in self._AutoGenObject.Targets:
- for T in sorted(self._AutoGenObject.Targets[Type], key=lambda x: str(x)):
+ for T in self._AutoGenObject.Targets[Type]:
# Generate related macros if needed
if T.GenFileListMacro and T.FileListMacro not in self.FileListMacros:
self.FileListMacros[T.FileListMacro] = []
@@ -1031,7 +1032,7 @@ cleanlib: CurrentFileDependencyList = DepDb[F]
else:
try:
- Fd = open(F.Path, 'rb')
+ Fd = open(F.Path, 'r')
except BaseException as X:
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))
@@ -1041,14 +1042,8 @@ cleanlib: continue
if FileContent[0] == 0xff or FileContent[0] == 0xfe:
- FileContent = str(FileContent, encoding="utf-16")
- IncludedFileList = gIncludePattern.findall(FileContent)
- else:
- try:
- FileContent = str(FileContent, encoding="utf-8")
- IncludedFileList = gIncludePattern.findall(FileContent)
- except:
- continue
+ FileContent = unicode(FileContent, "utf-16")
+ IncludedFileList = gIncludePattern.findall(FileContent)
for Inc in IncludedFileList:
Inc = Inc.strip()
@@ -1097,7 +1092,7 @@ cleanlib: DependencySet.update(ForceList)
if File in DependencySet:
DependencySet.remove(File)
- DependencyList = sorted(DependencySet, key=lambda x: str(x)) # remove duplicate ones
+ DependencyList = list(DependencySet) # remove duplicate ones
return DependencyList
@@ -1274,8 +1269,8 @@ ${BEGIN}\t-@${create_directory_command}\n${END}\ "separator" : Separator,
"module_tool_definitions" : ToolsDef,
- "shell_command_code" : list(self._SHELL_CMD_[self._FileType].keys()),
- "shell_command" : list(self._SHELL_CMD_[self._FileType].values()),
+ "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
+ "shell_command" : self._SHELL_CMD_[self._FileType].values(),
"create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
"custom_makefile_content" : CustomMakefile
@@ -1448,8 +1443,8 @@ cleanlib: "toolchain_tag" : MyAgo.ToolChain,
"build_target" : MyAgo.BuildTarget,
- "shell_command_code" : list(self._SHELL_CMD_[self._FileType].keys()),
- "shell_command" : list(self._SHELL_CMD_[self._FileType].values()),
+ "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
+ "shell_command" : self._SHELL_CMD_[self._FileType].values(),
"build_architecture_list" : MyAgo.Arch,
"architecture" : MyAgo.Arch,
"separator" : Separator,
@@ -1584,8 +1579,8 @@ class TopLevelMakefile(BuildFile): "toolchain_tag" : MyAgo.ToolChain,
"build_target" : MyAgo.BuildTarget,
- "shell_command_code" : list(self._SHELL_CMD_[self._FileType].keys()),
- "shell_command" : list(self._SHELL_CMD_[self._FileType].values()),
+ "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
+ "shell_command" : self._SHELL_CMD_[self._FileType].values(),
'arch' : list(MyAgo.ArchList),
"build_architecture_list" : ','.join(MyAgo.ArchList),
"separator" : Separator,
diff --git a/BaseTools/Source/Python/AutoGen/GenPcdDb.py b/BaseTools/Source/Python/AutoGen/GenPcdDb.py
index 68f92ef063..876fcf1efb 100644
--- a/BaseTools/Source/Python/AutoGen/GenPcdDb.py
+++ b/BaseTools/Source/Python/AutoGen/GenPcdDb.py
@@ -10,6 +10,7 @@ # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
+from __future__ import absolute_import
from io import BytesIO
from Common.Misc import *
from Common.StringUtils import StringToArray
@@ -257,7 +258,7 @@ class DbItemList: # Variable length, need to calculate one by one
#
assert(Index < len(self.RawDataList))
- for ItemIndex in range(Index):
+ for ItemIndex in xrange(Index):
Offset += len(self.RawDataList[ItemIndex])
else:
Offset = self.ItemSize * Index
@@ -291,7 +292,7 @@ class DbItemList: PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
- Buffer = bytearray()
+ Buffer = ''
for Datas in self.RawDataList:
if type(Datas) in (list, tuple):
for Data in Datas:
@@ -316,7 +317,7 @@ class DbExMapTblItemList (DbItemList): DbItemList.__init__(self, ItemSize, DataList, RawDataList)
def PackData(self):
- Buffer = bytearray()
+ Buffer = ''
PackStr = "=LHH"
for Datas in self.RawDataList:
Buffer += pack(PackStr,
@@ -344,7 +345,7 @@ class DbComItemList (DbItemList): assert(False)
else:
assert(Index < len(self.RawDataList))
- for ItemIndex in range(Index):
+ for ItemIndex in xrange(Index):
Offset += len(self.RawDataList[ItemIndex]) * self.ItemSize
return Offset
@@ -365,7 +366,7 @@ class DbComItemList (DbItemList): def PackData(self):
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
- Buffer = bytearray()
+ Buffer = ''
for DataList in self.RawDataList:
for Data in DataList:
if type(Data) in (list, tuple):
@@ -386,7 +387,7 @@ class DbVariableTableItemList (DbComItemList): def PackData(self):
PackStr = "=LLHHLHH"
- Buffer = bytearray()
+ Buffer = ''
for DataList in self.RawDataList:
for Data in DataList:
Buffer += pack(PackStr,
@@ -410,7 +411,7 @@ class DbStringHeadTableItemList(DbItemList): # Variable length, need to calculate one by one
#
assert(Index < len(self.RawDataList))
- for ItemIndex in range(Index):
+ for ItemIndex in xrange(Index):
Offset += len(self.RawDataList[ItemIndex])
else:
for innerIndex in range(Index):
@@ -447,7 +448,7 @@ class DbSkuHeadTableItemList (DbItemList): def PackData(self):
PackStr = "=LL"
- Buffer = bytearray()
+ Buffer = ''
for Data in self.RawDataList:
Buffer += pack(PackStr,
GetIntegerValue(Data[0]),
@@ -469,7 +470,7 @@ class DbSizeTableItemList (DbItemList): return length * self.ItemSize
def PackData(self):
PackStr = "=H"
- Buffer = bytearray()
+ Buffer = ''
for Data in self.RawDataList:
Buffer += pack(PackStr,
GetIntegerValue(Data[0]))
@@ -494,14 +495,14 @@ class DbStringItemList (DbComItemList): assert(len(RawDataList) == len(LenList))
DataList = []
# adjust DataList according to the LenList
- for Index in range(len(RawDataList)):
+ for Index in xrange(len(RawDataList)):
Len = LenList[Index]
RawDatas = RawDataList[Index]
assert(Len >= len(RawDatas))
ActualDatas = []
- for i in range(len(RawDatas)):
+ for i in xrange(len(RawDatas)):
ActualDatas.append(RawDatas[i])
- for i in range(len(RawDatas), Len):
+ for i in xrange(len(RawDatas), Len):
ActualDatas.append(0)
DataList.append(ActualDatas)
self.LenList = LenList
@@ -510,7 +511,7 @@ class DbStringItemList (DbComItemList): Offset = 0
assert(Index < len(self.LenList))
- for ItemIndex in range(Index):
+ for ItemIndex in xrange(Index):
Offset += self.LenList[ItemIndex]
return Offset
@@ -611,7 +612,7 @@ def BuildExDataBase(Dict): DbVardefValueUint32 = DbItemList(4, RawDataList = VardefValueUint32)
VpdHeadValue = Dict['VPD_DB_VALUE']
DbVpdHeadValue = DbComItemList(4, RawDataList = VpdHeadValue)
- ExMapTable = list(zip(Dict['EXMAPPING_TABLE_EXTOKEN'], Dict['EXMAPPING_TABLE_LOCAL_TOKEN'], Dict['EXMAPPING_TABLE_GUID_INDEX']))
+ ExMapTable = zip(Dict['EXMAPPING_TABLE_EXTOKEN'], Dict['EXMAPPING_TABLE_LOCAL_TOKEN'], Dict['EXMAPPING_TABLE_GUID_INDEX'])
DbExMapTable = DbExMapTblItemList(8, RawDataList = ExMapTable)
LocalTokenNumberTable = Dict['LOCAL_TOKEN_NUMBER_DB_VALUE']
DbLocalTokenNumberTable = DbItemList(4, RawDataList = LocalTokenNumberTable)
@@ -645,7 +646,7 @@ def BuildExDataBase(Dict): PcdNameOffsetTable = Dict['PCD_NAME_OFFSET']
DbPcdNameOffsetTable = DbItemList(4, RawDataList = PcdNameOffsetTable)
- SizeTableValue = list(zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH']))
+ SizeTableValue = zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH'])
DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
InitValueUint16 = Dict['INIT_DB_VALUE_UINT16']
DbInitValueUint16 = DbComItemList(2, RawDataList = InitValueUint16)
@@ -698,7 +699,7 @@ def BuildExDataBase(Dict): # Get offset of SkuId table in the database
SkuIdTableOffset = FixedHeaderLen
- for DbIndex in range(len(DbTotal)):
+ for DbIndex in xrange(len(DbTotal)):
if DbTotal[DbIndex] is SkuidValue:
break
SkuIdTableOffset += DbItemTotal[DbIndex].GetListSize()
@@ -710,7 +711,7 @@ def BuildExDataBase(Dict): for (LocalTokenNumberTableIndex, (Offset, Table)) in enumerate(LocalTokenNumberTable):
DbIndex = 0
DbOffset = FixedHeaderLen
- for DbIndex in range(len(DbTotal)):
+ for DbIndex in xrange(len(DbTotal)):
if DbTotal[DbIndex] is Table:
DbOffset += DbItemTotal[DbIndex].GetInterOffset(Offset)
break
@@ -736,7 +737,7 @@ def BuildExDataBase(Dict): (VariableHeadGuidIndex, VariableHeadStringIndex, SKUVariableOffset, VariableOffset, VariableRefTable, VariableAttribute) = VariableEntryPerSku[:]
DbIndex = 0
DbOffset = FixedHeaderLen
- for DbIndex in range(len(DbTotal)):
+ for DbIndex in xrange(len(DbTotal)):
if DbTotal[DbIndex] is VariableRefTable:
DbOffset += DbItemTotal[DbIndex].GetInterOffset(VariableOffset)
break
@@ -756,7 +757,7 @@ def BuildExDataBase(Dict): # calculate various table offset now
DbTotalLength = FixedHeaderLen
- for DbIndex in range(len(DbItemTotal)):
+ for DbIndex in xrange(len(DbItemTotal)):
if DbItemTotal[DbIndex] is DbLocalTokenNumberTable:
LocalTokenNumberTableOffset = DbTotalLength
elif DbItemTotal[DbIndex] is DbExMapTable:
@@ -849,7 +850,7 @@ def BuildExDataBase(Dict): Index = 0
for Item in DbItemTotal:
Index +=1
- b = bytes(Item.PackData())
+ b = Item.PackData()
Buffer += b
if Index == InitTableNum:
if len(Buffer) % 8:
@@ -917,9 +918,9 @@ def CreatePcdDataBase(PcdDBData): totallenbuff = pack("=L", totallen)
newbuffer = databasebuff[:32]
for i in range(4):
- newbuffer += bytes([totallenbuff[i]])
+ newbuffer += totallenbuff[i]
for i in range(36, totallen):
- newbuffer += bytes([databasebuff[i]])
+ newbuffer += databasebuff[i]
return newbuffer
@@ -962,7 +963,7 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase): AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)
final_data = ()
for item in PcdDbBuffer:
- final_data += unpack("B", bytes([item]))
+ final_data += unpack("B", item)
PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)
PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)
VarCheckTableData[(skuname, skuid)] = VarCheckTab
@@ -975,7 +976,7 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase): AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)
final_data = ()
for item in PcdDbBuffer:
- final_data += unpack("B", bytes([item]))
+ final_data += unpack("B", item)
PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)
return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)
@@ -1348,7 +1349,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase): DbValueList.append(Sku.DefaultValue)
- Pcd.TokenTypeList = sorted(set(Pcd.TokenTypeList))
+ Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
if Pcd.DatumType == TAB_VOID:
Dict['SIZE_TABLE_CNAME'].append(CName)
Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
@@ -1449,7 +1450,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase): Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CNameBinArray.split(","))
- Pcd.TokenTypeList = sorted(set(Pcd.TokenTypeList))
+ Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
# search the Offset and Table, used by LocalTokenNumberTableOffset
if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
diff --git a/BaseTools/Source/Python/AutoGen/GenVar.py b/BaseTools/Source/Python/AutoGen/GenVar.py
index d06fbe7e17..036f00e2bb 100644
--- a/BaseTools/Source/Python/AutoGen/GenVar.py
+++ b/BaseTools/Source/Python/AutoGen/GenVar.py
@@ -66,7 +66,7 @@ class VariableMgr(object): data = value_list[0]
value_list = []
for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):
- value_list.append(hex(unpack("B", bytes([data_byte]))[0]))
+ value_list.append(hex(unpack("B", data_byte)[0]))
newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = value_list
try:
newvaluestr = "{" + ",".join(VariableMgr.assemble_variable(newvalue)) +"}"
@@ -87,13 +87,13 @@ class VariableMgr(object): data = value_list[0]
value_list = []
for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):
- value_list.append(hex(unpack("B", bytes([data_byte]))[0]))
+ value_list.append(hex(unpack("B", data_byte)[0]))
newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = (value_list,item.pcdname,item.PcdDscLine)
for offset in newvalue:
value_list,itemPcdname,itemPcdDscLine = newvalue[offset]
if offset > len(BaseValue) or (offset + len(value_list) > len(BaseValue)):
EdkLogger.error("build", AUTOGEN_ERROR, "The EFI Variable referred by PCD %s in line %s exceeds variable size: %s\n" % (itemPcdname,itemPcdDscLine,hex(len(BaseValue))))
- for i in range(len(value_list)):
+ for i in xrange(len(value_list)):
BaseValue[offset + i] = value_list[i]
newvaluestr = "{" + ",".join(BaseValue) +"}"
return newvaluestr
@@ -129,7 +129,7 @@ class VariableMgr(object): for current_valuedict_key in ordered_valuedict_keys:
if current_valuedict_key < len(var_value):
raise
- for _ in range(current_valuedict_key - len(var_value)):
+ for _ in xrange(current_valuedict_key - len(var_value)):
var_value.append('0x00')
var_value += valuedict[current_valuedict_key]
return var_value
@@ -161,7 +161,7 @@ class VariableMgr(object): default_data_array = ()
for item in default_data_buffer:
- default_data_array += unpack("B", bytes([item]))
+ default_data_array += unpack("B", item)
var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])
@@ -179,7 +179,7 @@ class VariableMgr(object): others_data_array = ()
for item in others_data_buffer:
- others_data_array += unpack("B", bytes([item]))
+ others_data_array += unpack("B", item)
data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)
@@ -195,7 +195,7 @@ class VariableMgr(object): return []
pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})
- NvStoreDataBuffer = bytearray()
+ NvStoreDataBuffer = ""
var_data_offset = collections.OrderedDict()
offset = NvStorageHeaderSize
for default_data, default_info in pcds_default_data.values():
@@ -222,7 +222,7 @@ class VariableMgr(object): nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8)
- data_delta_structure_buffer = bytearray()
+ data_delta_structure_buffer = ""
for skuname, defaultstore in var_data:
if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
continue
@@ -254,7 +254,7 @@ class VariableMgr(object): def unpack_data(data):
final_data = ()
for item in data:
- final_data += unpack("B", bytes([item]))
+ final_data += unpack("B", item)
return final_data
@staticmethod
@@ -322,7 +322,7 @@ class VariableMgr(object): @staticmethod
def PACK_VARIABLES_DATA(var_value,data_type, tail = None):
- Buffer = bytearray()
+ Buffer = ""
data_len = 0
if data_type == DataType.TAB_VOID:
for value_char in var_value.strip("{").strip("}").split(","):
@@ -352,7 +352,7 @@ class VariableMgr(object): @staticmethod
def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):
- Buffer = bytearray()
+ Buffer = ""
Buffer += pack("=L", 4+8+8)
Buffer += pack("=Q", int(skuid))
Buffer += pack("=Q", int(defaultstoragename))
@@ -377,7 +377,7 @@ class VariableMgr(object): def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):
skuid = self.GetSkuId(skuname)
defaultstorageid = self.GetDefaultStoreId(defaultstoragename)
- Buffer = bytearray()
+ Buffer = ""
Buffer += pack("=L", 4+8+8)
Buffer += pack("=Q", int(skuid))
Buffer += pack("=Q", int(defaultstorageid))
@@ -400,7 +400,7 @@ class VariableMgr(object): @staticmethod
def PACK_VARIABLE_NAME(var_name):
- Buffer = bytearray()
+ Buffer = ""
for name_char in var_name.strip("{").strip("}").split(","):
Buffer += pack("=B", int(name_char, 16))
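
Editor's note: the GenPcdDb.py and GenVar.py hunks above mostly revert byte-buffer handling (bytearray back to the Python 2 str buffer '') and range back to xrange. The sketch below is a hypothetical illustration of the packing pattern involved, not code from the patch; it runs under Python 3, with the restored Python 2 forms shown in comments.

    # Illustrative sketch only (hypothetical data, not part of the patch).
    from struct import pack, unpack

    records = [(1, 2, 3), (4, 5, 6)]

    # Python 3 (removed by the revert): Buffer = bytearray()
    # Python 2 (restored):              Buffer = ''
    buffer = bytearray()
    for ex_token, local_token, guid_index in records:
        buffer += pack("=LHH", ex_token, local_token, guid_index)

    # Iterating bytes/bytearray yields ints in Python 3, so each byte is
    # rewrapped with bytes([item]) before unpack(); in Python 2 iteration
    # yields one-character strings and unpack("B", item) works directly.
    final_data = ()
    for item in buffer:
        final_data += unpack("B", bytes([item]))

    print(len(buffer), final_data[:4])
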
diff --git a/BaseTools/Source/Python/AutoGen/IdfClassObject.py b/BaseTools/Source/Python/AutoGen/IdfClassObject.py
index f96a16abab..b227b10258 100644
--- a/BaseTools/Source/Python/AutoGen/IdfClassObject.py
+++ b/BaseTools/Source/Python/AutoGen/IdfClassObject.py
@@ -13,6 +13,7 @@ ##
# Import Modules
#
+from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import *
from Common.StringUtils import GetLineNo
diff --git a/BaseTools/Source/Python/AutoGen/InfSectionParser.py b/BaseTools/Source/Python/AutoGen/InfSectionParser.py
index 09e9af3fb4..d985089738 100644
--- a/BaseTools/Source/Python/AutoGen/InfSectionParser.py
+++ b/BaseTools/Source/Python/AutoGen/InfSectionParser.py
@@ -34,7 +34,7 @@ class InfSectionParser(): SectionData = []
try:
- FileLinesList = open(self._FilePath, "r").readlines()
+ FileLinesList = open(self._FilePath, "r", 0).readlines()
except BaseException:
EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % self._FilePath)
diff --git a/BaseTools/Source/Python/AutoGen/StrGather.py b/BaseTools/Source/Python/AutoGen/StrGather.py
index 73b2d0a3da..361d499076 100644
--- a/BaseTools/Source/Python/AutoGen/StrGather.py
+++ b/BaseTools/Source/Python/AutoGen/StrGather.py
@@ -14,6 +14,7 @@ ##
# Import Modules
#
+from __future__ import absolute_import
import re
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import *
@@ -122,8 +123,6 @@ def DecToHexList(Dec, Digit = 8): # @retval: A list for formatted hex string
#
def AscToHexList(Ascii):
- if isinstance(Ascii, bytes):
- return ['0x{0:02X}'.format(Item) for Item in Ascii]
return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]
## Create content of .h file
@@ -552,9 +551,9 @@ def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, Ski #
# support ISO 639-2 codes in .UNI files of EDK Shell
#
- Uni = UniFileClassObject(sorted (UniFilList, key=lambda x: x.File), True, IncludePathList)
+ Uni = UniFileClassObject(sorted (UniFilList), True, IncludePathList)
else:
- Uni = UniFileClassObject(sorted (UniFilList, key=lambda x: x.File), IsCompatibleMode, IncludePathList)
+ Uni = UniFileClassObject(sorted (UniFilList), IsCompatibleMode, IncludePathList)
else:
EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, 'No unicode files given')
diff --git a/BaseTools/Source/Python/AutoGen/UniClassObject.py b/BaseTools/Source/Python/AutoGen/UniClassObject.py
index 1c86be4429..384f31b165 100644
--- a/BaseTools/Source/Python/AutoGen/UniClassObject.py
+++ b/BaseTools/Source/Python/AutoGen/UniClassObject.py
@@ -16,6 +16,7 @@ ##
# Import Modules
#
+from __future__ import print_function
import Common.LongFilePathOs as os, codecs, re
import distutils.util
import Common.EdkLogger as EdkLogger
@@ -45,6 +46,18 @@ BACK_SLASH_PLACEHOLDER = u'\u0006' gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE)
+## Convert a python unicode string to a normal string
+#
+# Convert a python unicode string to a normal string
+# UniToStr(u'I am a string') is 'I am a string'
+#
+# @param Uni: The python unicode string
+#
+# @retval: The formatted normal string
+#
+def UniToStr(Uni):
+ return repr(Uni)[2:-1]
+
## Convert a unicode string to a Hex list
#
# Convert a unicode string to a Hex list
@@ -426,7 +439,7 @@ class UniFileClassObject(object): if EndPos != -1 and EndPos - StartPos == 6 :
if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE):
EndStr = Line[EndPos: ]
- UniStr = Line[StartPos + 2: EndPos]
+ UniStr = ('\u' + (Line[StartPos + 2 : EndPos])).decode('unicode_escape')
if EndStr.startswith(u'\\x') and len(EndStr) >= 7:
if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE):
Line = Line[0 : StartPos] + UniStr + EndStr
diff --git a/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py b/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py
index 77518fc1cc..edd40a1498 100644
--- a/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py
+++ b/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py
@@ -41,7 +41,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object): os.mkdir(dest)
BinFileName = "PcdVarCheck.bin"
BinFilePath = os.path.join(dest, BinFileName)
- Buffer = bytearray()
+ Buffer = ''
index = 0
for var_check_tab in self.var_check_info:
index += 1
@@ -57,7 +57,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object): itemIndex += 1
realLength += 5
for v_data in item.data:
- if isinstance(v_data, int):
+ if type(v_data) in (int, long):
realLength += item.StorageWidth
else:
realLength += item.StorageWidth
@@ -137,7 +137,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object): Buffer += b
realLength += 1
for v_data in item.data:
- if isinstance(v_data, int):
+ if type(v_data) in (int, long):
b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data)
Buffer += b
realLength += item.StorageWidth
@@ -241,7 +241,7 @@ class VAR_CHECK_PCD_VALID_OBJ(object): class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
def __init__(self, VarOffset, validlist, PcdDataType):
- super().__init__(VarOffset, validlist, PcdDataType)
+ super(VAR_CHECK_PCD_VALID_LIST, self).__init__(VarOffset, validlist, PcdDataType)
self.Type = 1
valid_num_list = []
for item in self.rawdata:
@@ -261,7 +261,7 @@ class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ): class VAR_CHECK_PCD_VALID_RANGE(VAR_CHECK_PCD_VALID_OBJ):
def __init__(self, VarOffset, validrange, PcdDataType):
- super().__init__(VarOffset, validrange, PcdDataType)
+ super(VAR_CHECK_PCD_VALID_RANGE, self).__init__(VarOffset, validrange, PcdDataType)
self.Type = 2
RangeExpr = ""
i = 0