author     Liming Gao <liming.gao@intel.com>  2018-07-05 17:40:04 +0800
committer  Liming Gao <liming.gao@intel.com>  2018-07-09 10:25:47 +0800
commit     f7496d717357b9af78414d19679b073403812340 (patch)
tree       67621e65fd181bdf8a12d12e7706579beaaed0fb /BaseTools/Source/Python
parent     39456d00f36e04b7e7efb208f350f4e83b6c3531 (diff)
BaseTools: Clean up source files
1. Do not use tab characters
2. No trailing white space in one line
3. All files must end with CRLF

Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Liming Gao <liming.gao@intel.com>
Cc: Yonghong Zhu <yonghong.zhu@intel.com>
Reviewed-by: Yonghong Zhu <yonghong.zhu@intel.com>
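The three rules above are purely mechanical, so a cleanup of this kind can be scripted. The sketch below is a hypothetical illustration of such a pass over the Python sources, not the tool actually used for this commit; the clean_file/clean_tree helpers and the four-space tab width are assumptions.

import os

def clean_file(path, tab_width=4):
    # Read the file as UTF-8 text; splitlines() tolerates LF, CR and CRLF endings.
    with open(path, 'rb') as f:
        lines = f.read().decode('utf-8').splitlines()
    cleaned = []
    for line in lines:
        # Rule 1: do not use tab characters (expand to spaces; width is an assumption).
        line = line.expandtabs(tab_width)
        # Rule 2: no trailing white space on a line.
        cleaned.append(line.rstrip())
    # Rule 3: terminate every line, including the last one, with CRLF.
    new_text = '\r\n'.join(cleaned) + '\r\n' if cleaned else ''
    with open(path, 'wb') as f:
        f.write(new_text.encode('utf-8'))

def clean_tree(root):
    # Walk the tree and normalize every Python source file under it.
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            if name.endswith('.py'):
                clean_file(os.path.join(dirpath, name))

if __name__ == '__main__':
    clean_tree('BaseTools/Source/Python')

Because such a pass only rewrites whitespace and line endings, every changed line appears as a paired deletion and insertion, which is consistent with the overall diffstat below showing 6635 insertions and 6635 deletions.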
Diffstat (limited to 'BaseTools/Source/Python')
-rw-r--r-- BaseTools/Source/Python/AutoGen/AutoGen.py 186
-rw-r--r-- BaseTools/Source/Python/AutoGen/BuildEngine.py 2
-rw-r--r-- BaseTools/Source/Python/AutoGen/GenC.py 74
-rw-r--r-- BaseTools/Source/Python/AutoGen/GenPcdDb.py 196
-rw-r--r-- BaseTools/Source/Python/AutoGen/InfSectionParser.py 12
-rw-r--r-- BaseTools/Source/Python/AutoGen/StrGather.py 26
-rw-r--r-- BaseTools/Source/Python/AutoGen/UniClassObject.py 18
-rw-r--r-- BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py 40
-rw-r--r-- BaseTools/Source/Python/BPDG/BPDG.py 58
-rw-r--r-- BaseTools/Source/Python/BPDG/GenVpd.py 130
-rw-r--r-- BaseTools/Source/Python/BPDG/StringTable.py 16
-rw-r--r-- BaseTools/Source/Python/Common/BuildVersion.py 10
-rw-r--r-- BaseTools/Source/Python/Common/Database.py 20
-rw-r--r-- BaseTools/Source/Python/Common/Misc.py 52
-rw-r--r-- BaseTools/Source/Python/Common/MultipleWorkspace.py 20
-rw-r--r-- BaseTools/Source/Python/Common/RangeExpression.py 118
-rw-r--r-- BaseTools/Source/Python/Common/StringUtils.py 2
-rw-r--r-- BaseTools/Source/Python/Common/VariableAttributes.py 14
-rw-r--r-- BaseTools/Source/Python/Common/VpdInfoFile.py 84
-rw-r--r-- BaseTools/Source/Python/CommonDataClass/CommonClass.py 12
-rw-r--r-- BaseTools/Source/Python/CommonDataClass/FdfClass.py 30
-rw-r--r-- BaseTools/Source/Python/Ecc/CLexer.py 10
-rw-r--r-- BaseTools/Source/Python/Ecc/CParser.py 1510
-rw-r--r-- BaseTools/Source/Python/Ecc/Check.py 22
-rw-r--r-- BaseTools/Source/Python/Ecc/CodeFragment.py 6
-rw-r--r-- BaseTools/Source/Python/Ecc/CodeFragmentCollector.py 122
-rw-r--r-- BaseTools/Source/Python/Ecc/Configuration.py 10
-rw-r--r-- BaseTools/Source/Python/Ecc/Ecc.py 30
-rw-r--r-- BaseTools/Source/Python/Ecc/EccGlobalData.py 4
-rw-r--r-- BaseTools/Source/Python/Ecc/Exception.py 16
-rw-r--r-- BaseTools/Source/Python/Ecc/FileProfile.py 8
-rw-r--r-- BaseTools/Source/Python/Ecc/MetaDataParser.py 48
-rw-r--r-- BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py 100
-rw-r--r-- BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py 90
-rw-r--r-- BaseTools/Source/Python/Ecc/ParserWarning.py 4
-rw-r--r-- BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py 6
-rw-r--r-- BaseTools/Source/Python/Ecc/Xml/__init__.py 10
-rw-r--r-- BaseTools/Source/Python/Ecc/c.py 12
-rw-r--r-- BaseTools/Source/Python/Eot/CLexer.py 10
-rw-r--r-- BaseTools/Source/Python/Eot/CParser.py 1510
-rw-r--r-- BaseTools/Source/Python/Eot/Eot.py 18
-rw-r--r-- BaseTools/Source/Python/Eot/ParserWarning.py 4
-rw-r--r-- BaseTools/Source/Python/Eot/Report.py 6
-rw-r--r-- BaseTools/Source/Python/GenFds/Attribute.py 4
-rw-r--r-- BaseTools/Source/Python/GenFds/Capsule.py 2
-rw-r--r-- BaseTools/Source/Python/GenFds/CapsuleData.py 18
-rw-r--r-- BaseTools/Source/Python/GenFds/EfiSection.py 8
-rw-r--r-- BaseTools/Source/Python/GenFds/Fd.py 2
-rw-r--r-- BaseTools/Source/Python/GenFds/FdfParser.py 146
-rw-r--r-- BaseTools/Source/Python/GenFds/Ffs.py 6
-rw-r--r-- BaseTools/Source/Python/GenFds/FfsFileStatement.py 4
-rw-r--r-- BaseTools/Source/Python/GenFds/FfsInfStatement.py 60
-rw-r--r-- BaseTools/Source/Python/GenFds/Fv.py 14
-rw-r--r-- BaseTools/Source/Python/GenFds/GenFds.py 30
-rw-r--r-- BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py 36
-rw-r--r-- BaseTools/Source/Python/GenFds/GuidSection.py 2
-rw-r--r-- BaseTools/Source/Python/GenFds/OptRomFileStatement.py 8
-rw-r--r-- BaseTools/Source/Python/GenFds/OptRomInfStatement.py 24
-rw-r--r-- BaseTools/Source/Python/GenFds/OptionRom.py 48
-rw-r--r-- BaseTools/Source/Python/GenFds/Region.py 4
-rw-r--r-- BaseTools/Source/Python/GenFds/Section.py 2
-rw-r--r-- BaseTools/Source/Python/GenFds/Vtf.py 20
-rw-r--r-- BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py 26
-rw-r--r-- BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py 8
-rw-r--r-- BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py 34
-rw-r--r-- BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py 32
-rw-r--r-- BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.txt 2
-rw-r--r-- BaseTools/Source/Python/Table/Table.py 22
-rw-r--r-- BaseTools/Source/Python/Table/TableDataModel.py 16
-rw-r--r-- BaseTools/Source/Python/Table/TableDec.py 14
-rw-r--r-- BaseTools/Source/Python/Table/TableDsc.py 14
-rw-r--r-- BaseTools/Source/Python/Table/TableEotReport.py 10
-rw-r--r-- BaseTools/Source/Python/Table/TableFdf.py 14
-rw-r--r-- BaseTools/Source/Python/Table/TableFile.py 14
-rw-r--r-- BaseTools/Source/Python/Table/TableFunction.py 10
-rw-r--r-- BaseTools/Source/Python/Table/TableIdentifier.py 8
-rw-r--r-- BaseTools/Source/Python/Table/TableInf.py 14
-rw-r--r-- BaseTools/Source/Python/Table/TablePcd.py 8
-rw-r--r-- BaseTools/Source/Python/Table/TableReport.py 8
-rw-r--r-- BaseTools/Source/Python/TargetTool/TargetTool.py 26
-rw-r--r-- BaseTools/Source/Python/Trim/Trim.py 22
-rw-r--r-- BaseTools/Source/Python/UPT/BuildVersion.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/Core/DependencyRules.py 62
-rw-r--r-- BaseTools/Source/Python/UPT/Core/DistributionPackageClass.py 70
-rw-r--r-- BaseTools/Source/Python/UPT/Core/FileHook.py 8
-rw-r--r-- BaseTools/Source/Python/UPT/Core/IpiDb.py 342
-rw-r--r-- BaseTools/Source/Python/UPT/Core/PackageFile.py 88
-rw-r--r-- BaseTools/Source/Python/UPT/Core/__init__.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py 142
-rw-r--r-- BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py 52
-rw-r--r-- BaseTools/Source/Python/UPT/GenMetaFile/GenMetaFileMisc.py 18
-rw-r--r-- BaseTools/Source/Python/UPT/GenMetaFile/GenXmlFile.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/GenMetaFile/__init__.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/InstallPkg.py 204
-rw-r--r-- BaseTools/Source/Python/UPT/InventoryWs.py 50
-rw-r--r-- BaseTools/Source/Python/UPT/Library/CommentGenerating.py 58
-rw-r--r-- BaseTools/Source/Python/UPT/Library/CommentParsing.py 210
-rw-r--r-- BaseTools/Source/Python/UPT/Library/DataType.py 106
-rw-r--r-- BaseTools/Source/Python/UPT/Library/ExpressionValidate.py 118
-rw-r--r-- BaseTools/Source/Python/UPT/Library/GlobalData.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/Library/Misc.py 98
-rw-r--r-- BaseTools/Source/Python/UPT/Library/ParserValidate.py 222
-rw-r--r-- BaseTools/Source/Python/UPT/Library/Parsing.py 52
-rw-r--r-- BaseTools/Source/Python/UPT/Library/UniClassObject.py 178
-rw-r--r-- BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py 12
-rw-r--r-- BaseTools/Source/Python/UPT/Library/Xml/__init__.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/Library/__init__.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/Logger/Log.py 18
-rw-r--r-- BaseTools/Source/Python/UPT/Logger/StringTable.py 48
-rw-r--r-- BaseTools/Source/Python/UPT/Logger/ToolError.py 8
-rw-r--r-- BaseTools/Source/Python/UPT/Logger/__init__.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/MkPkg.py 56
-rw-r--r-- BaseTools/Source/Python/UPT/Object/POM/CommonObject.py 244
-rw-r--r-- BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py 276
-rw-r--r-- BaseTools/Source/Python/UPT/Object/POM/PackageObject.py 92
-rw-r--r-- BaseTools/Source/Python/UPT/Object/POM/__init__.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/DecObject.py 98
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py 90
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfBuildOptionObject.py 44
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfCommonObject.py 90
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfDefineCommonObject.py 34
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py 308
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfDepexObject.py 14
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py 154
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py 60
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py 32
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py 64
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py 96
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py 28
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py 160
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py 118
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py 128
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/InfUserExtensionObject.py 70
-rw-r--r-- BaseTools/Source/Python/UPT/Object/Parser/__init__.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/Object/__init__.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/DecParser.py 230
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/DecParserMisc.py 14
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py 124
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfBuildOptionSectionParser.py 100
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py 84
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py 50
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py 46
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py 24
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py 58
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfParser.py 270
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfParserMisc.py 68
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py 90
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfSectionParser.py 48
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py 62
-rw-r--r-- BaseTools/Source/Python/UPT/Parser/__init__.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py 232
-rw-r--r-- BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py 68
-rw-r--r-- BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py 18
-rw-r--r-- BaseTools/Source/Python/UPT/PomAdapter/__init__.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/ReplacePkg.py 28
-rw-r--r-- BaseTools/Source/Python/UPT/RmPkg.py 42
-rw-r--r-- BaseTools/Source/Python/UPT/UPT.py 10
-rw-r--r-- BaseTools/Source/Python/UPT/UnitTest/CommentGeneratingUnitTest.py 464
-rw-r--r-- BaseTools/Source/Python/UPT/UnitTest/CommentParsingUnitTest.py 310
-rw-r--r-- BaseTools/Source/Python/UPT/UnitTest/DecParserTest.py 58
-rw-r--r-- BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py 146
-rw-r--r-- BaseTools/Source/Python/UPT/UnitTest/InfBinarySectionTest.py 66
-rw-r--r-- BaseTools/Source/Python/UPT/Xml/CommonXml.py 30
-rw-r--r-- BaseTools/Source/Python/UPT/Xml/GuidProtocolPpiXml.py 64
-rw-r--r-- BaseTools/Source/Python/UPT/Xml/IniToXml.py 60
-rw-r--r-- BaseTools/Source/Python/UPT/Xml/PackageSurfaceAreaXml.py 18
-rw-r--r-- BaseTools/Source/Python/UPT/Xml/PcdXml.py 136
-rw-r--r-- BaseTools/Source/Python/UPT/Xml/XmlParser.py 64
-rw-r--r-- BaseTools/Source/Python/UPT/Xml/XmlParserMisc.py 26
-rw-r--r-- BaseTools/Source/Python/UPT/Xml/__init__.py 10
-rw-r--r-- BaseTools/Source/Python/Workspace/MetaFileParser.py 14
-rw-r--r-- BaseTools/Source/Python/Workspace/MetaFileTable.py 88
-rw-r--r-- BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 24
-rw-r--r-- BaseTools/Source/Python/build/BuildReport.py 32
-rw-r--r-- BaseTools/Source/Python/build/build.py 16
-rw-r--r-- BaseTools/Source/Python/sitecustomize.py 2
177 files changed, 6635 insertions, 6635 deletions
diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py
index 6d76afd81a..b0801c787a 100644
--- a/BaseTools/Source/Python/AutoGen/AutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -241,7 +241,7 @@ class WorkspaceAutoGen(AutoGen):
super(WorkspaceAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
self._Init = True
-
+
## Initialize WorkspaceAutoGen
#
# @param WorkspaceDir Root directory of workspace
@@ -310,7 +310,7 @@ class WorkspaceAutoGen(AutoGen):
ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"
% (self.BuildTarget, " ".join(self.Platform.BuildTargets)))
-
+
# parse FDF file to get PCDs in it, if any
if not self.FdfFile:
self.FdfFile = self.Platform.FlashDefinition
@@ -763,7 +763,7 @@ class WorkspaceAutoGen(AutoGen):
## _CheckDuplicateInFV() method
#
- # Check whether there is duplicate modules/files exist in FV section.
+ # Check whether there is duplicate modules/files exist in FV section.
# The check base on the file GUID;
#
def _CheckDuplicateInFV(self, Fdf):
@@ -794,7 +794,7 @@ class WorkspaceAutoGen(AutoGen):
Module.Guid.upper()),
ExtraData=self.FdfFile)
#
- # Some INF files not have entity in DSC file.
+ # Some INF files not have entity in DSC file.
#
if not InfFoundFlag:
if FfsFile.InfFileName.find('$') == -1:
@@ -804,7 +804,7 @@ class WorkspaceAutoGen(AutoGen):
PathClassObj = PathClass(FfsFile.InfFileName, self.WorkspaceDir)
#
- # Here we just need to get FILE_GUID from INF file, use 'COMMON' as ARCH attribute. and use
+ # Here we just need to get FILE_GUID from INF file, use 'COMMON' as ARCH attribute. and use
# BuildObject from one of AutoGenObjectList is enough.
#
InfObj = self.AutoGenObjectList[0].BuildDatabase.WorkspaceDb.BuildObject[PathClassObj, TAB_ARCH_COMMON, self.BuildTarget, self.ToolChain]
@@ -823,7 +823,7 @@ class WorkspaceAutoGen(AutoGen):
if FfsFile.NameGuid is not None:
#
- # If the NameGuid reference a PCD name.
+ # If the NameGuid reference a PCD name.
# The style must match: PCD(xxxx.yyy)
#
if gPCDAsGuidPattern.match(FfsFile.NameGuid):
@@ -900,7 +900,7 @@ class WorkspaceAutoGen(AutoGen):
for Pcd in Pa.Platform.Pcds:
PcdType = Pa.Platform.Pcds[Pcd].Type
- # If no PCD type, this PCD comes from FDF
+ # If no PCD type, this PCD comes from FDF
if not PcdType:
continue
@@ -992,14 +992,14 @@ class WorkspaceAutoGen(AutoGen):
## Check the PCDs token value conflict in each DEC file.
#
# Will cause build break and raise error message while two PCDs conflict.
- #
+ #
# @return None
#
def _CheckAllPcdsTokenValueConflict(self):
for Pa in self.AutoGenObjectList:
for Package in Pa.PackageList:
PcdList = Package.Pcds.values()
- PcdList.sort(lambda x, y: cmp(int(x.TokenValue, 0), int(y.TokenValue, 0)))
+ PcdList.sort(lambda x, y: cmp(int(x.TokenValue, 0), int(y.TokenValue, 0)))
Count = 0
while (Count < len(PcdList) - 1) :
Item = PcdList[Count]
@@ -1124,20 +1124,20 @@ class PlatformAutoGen(AutoGen):
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
self._Init = True
#
- # Used to store all PCDs for both PEI and DXE phase, in order to generate
+ # Used to store all PCDs for both PEI and DXE phase, in order to generate
# correct PCD database
- #
+ #
_DynaPcdList_ = []
_NonDynaPcdList_ = []
_PlatformPcds = {}
-
+
#
- # The priority list while override build option
+ # The priority list while override build option
#
PrioList = {"0x11111" : 16, # TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
"0x01111" : 15, # ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
"0x10111" : 14, # TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
- "0x00111" : 13, # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x00111" : 13, # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
"0x11011" : 12, # TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
"0x01011" : 11, # ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
"0x10011" : 10, # TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
@@ -1289,9 +1289,9 @@ class PlatformAutoGen(AutoGen):
#
def CollectFixedAtBuildPcds(self):
for LibAuto in self.LibraryAutoGenList:
- FixedAtBuildPcds = {}
- ShareFixedAtBuildPcdsSameValue = {}
- for Module in LibAuto._ReferenceModules:
+ FixedAtBuildPcds = {}
+ ShareFixedAtBuildPcdsSameValue = {}
+ for Module in LibAuto._ReferenceModules:
for Pcd in Module.FixedAtBuildPcds + LibAuto.FixedAtBuildPcds:
key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
if key not in FixedAtBuildPcds:
@@ -1299,7 +1299,7 @@ class PlatformAutoGen(AutoGen):
FixedAtBuildPcds[key] = Pcd.DefaultValue
else:
if FixedAtBuildPcds[key] != Pcd.DefaultValue:
- ShareFixedAtBuildPcdsSameValue[key] = False
+ ShareFixedAtBuildPcdsSameValue[key] = False
for Pcd in LibAuto.FixedAtBuildPcds:
key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
@@ -1308,7 +1308,7 @@ class PlatformAutoGen(AutoGen):
DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)]
if DscPcd.Type != TAB_PCDS_FIXED_AT_BUILD:
continue
- if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
+ if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
LibAuto.ConstPcd[key] = FixedAtBuildPcds[key]
def CollectVariables(self, DynamicPcdSet):
@@ -1405,7 +1405,7 @@ class PlatformAutoGen(AutoGen):
for F in self.Platform.Modules.keys():
M = ModuleAutoGen(self.Workspace, F, self.BuildTarget, self.ToolChain, self.Arch, self.MetaFile)
#GuidValue.update(M.Guids)
-
+
self.Platform.Modules[F].M = M
for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:
@@ -1417,27 +1417,27 @@ class PlatformAutoGen(AutoGen):
if M.IsBinaryModule == True:
PcdFromModule.IsFromBinaryInf = True
- # Check the PCD from DSC or not
+ # Check the PCD from DSC or not
PcdFromModule.IsFromDsc = (PcdFromModule.TokenCName, PcdFromModule.TokenSpaceGuidCName) in self.Platform.Pcds
if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET or PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
if F.Path not in FdfModuleList:
- # If one of the Source built modules listed in the DSC is not listed
- # in FDF modules, and the INF lists a PCD can only use the PcdsDynamic
- # access method (it is only listed in the DEC file that declares the
+ # If one of the Source built modules listed in the DSC is not listed
+ # in FDF modules, and the INF lists a PCD can only use the PcdsDynamic
+ # access method (it is only listed in the DEC file that declares the
# PCD as PcdsDynamic), then build tool will report warning message
- # notify the PI that they are attempting to build a module that must
- # be included in a flash image in order to be functional. These Dynamic
- # PCD will not be added into the Database unless it is used by other
+ # notify the PI that they are attempting to build a module that must
+ # be included in a flash image in order to be functional. These Dynamic
+ # PCD will not be added into the Database unless it is used by other
# modules that are included in the FDF file.
if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET and \
PcdFromModule.IsFromBinaryInf == False:
# Print warning message to let the developer make a determine.
continue
- # If one of the Source built modules listed in the DSC is not listed in
- # FDF modules, and the INF lists a PCD can only use the PcdsDynamicEx
- # access method (it is only listed in the DEC file that declares the
- # PCD as PcdsDynamicEx), then DO NOT break the build; DO NOT add the
+ # If one of the Source built modules listed in the DSC is not listed in
+ # FDF modules, and the INF lists a PCD can only use the PcdsDynamicEx
+ # access method (it is only listed in the DEC file that declares the
+ # PCD as PcdsDynamicEx), then DO NOT break the build; DO NOT add the
# PCD to the Platform's PCD Database.
if PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
continue
@@ -1465,14 +1465,14 @@ class PlatformAutoGen(AutoGen):
PcdFromModule.Pending = False
self._NonDynaPcdList_.append (PcdFromModule)
DscModuleSet = {os.path.normpath(ModuleInf.Path) for ModuleInf in self.Platform.Modules}
- # add the PCD from modules that listed in FDF but not in DSC to Database
+ # add the PCD from modules that listed in FDF but not in DSC to Database
for InfName in FdfModuleList:
if InfName not in DscModuleSet:
InfClass = PathClass(InfName)
M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
- # If a module INF in FDF but not in current arch's DSC module list, it must be module (either binary or source)
- # for different Arch. PCDs in source module for different Arch is already added before, so skip the source module here.
- # For binary module, if in current arch, we need to list the PCDs into database.
+ # If a module INF in FDF but not in current arch's DSC module list, it must be module (either binary or source)
+ # for different Arch. PCDs in source module for different Arch is already added before, so skip the source module here.
+ # For binary module, if in current arch, we need to list the PCDs into database.
if not M.IsSupportedArch:
continue
# Override the module PCD setting by platform setting
@@ -1497,20 +1497,20 @@ class PlatformAutoGen(AutoGen):
self._NonDynaPcdList_.append(PcdFromModule)
if PcdFromModule in self._DynaPcdList_ and PcdFromModule.Phase == 'PEI' and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
# Overwrite the phase of any the same PCD existing, if Phase is PEI.
- # It is to solve the case that a dynamic PCD used by a PEM module/PEI
+ # It is to solve the case that a dynamic PCD used by a PEM module/PEI
# module & DXE module at a same time.
# Overwrite the type of the PCDs in source INF by the type of AsBuild
- # INF file as DynamicEx.
+ # INF file as DynamicEx.
Index = self._DynaPcdList_.index(PcdFromModule)
self._DynaPcdList_[Index].Phase = PcdFromModule.Phase
self._DynaPcdList_[Index].Type = PcdFromModule.Type
for PcdFromModule in self._NonDynaPcdList_:
- # If a PCD is not listed in the DSC file, but binary INF files used by
- # this platform all (that use this PCD) list the PCD in a [PatchPcds]
- # section, AND all source INF files used by this platform the build
- # that use the PCD list the PCD in either a [Pcds] or [PatchPcds]
+ # If a PCD is not listed in the DSC file, but binary INF files used by
+ # this platform all (that use this PCD) list the PCD in a [PatchPcds]
+ # section, AND all source INF files used by this platform the build
+ # that use the PCD list the PCD in either a [Pcds] or [PatchPcds]
# section, then the tools must NOT add the PCD to the Platform's PCD
- # Database; the build must assign the access method for this PCD as
+ # Database; the build must assign the access method for this PCD as
# PcdsPatchableInModule.
if PcdFromModule not in self._DynaPcdList_:
continue
@@ -1533,7 +1533,7 @@ class PlatformAutoGen(AutoGen):
self._DynamicPcdList = self._DynaPcdList_
#
# Sort dynamic PCD list to:
- # 1) If PCD's datum type is VOID* and value is unicode string which starts with L, the PCD item should
+ # 1) If PCD's datum type is VOID* and value is unicode string which starts with L, the PCD item should
# try to be put header of dynamicd List
# 2) If PCD is HII type, the PCD item should be put after unicode type PCD
#
@@ -1554,7 +1554,7 @@ class PlatformAutoGen(AutoGen):
if self._PlatformPcds[item].DatumType and self._PlatformPcds[item].DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
self._PlatformPcds[item].DatumType = TAB_VOID
- if (self.Workspace.ArchList[-1] == self.Arch):
+ if (self.Workspace.ArchList[-1] == self.Arch):
for Pcd in self._DynamicPcdList:
# just pick the a value to determine whether is unicode string type
Sku = Pcd.SkuInfoList.values()[0]
@@ -1637,7 +1637,7 @@ class PlatformAutoGen(AutoGen):
#
# Fix the PCDs define in VPD PCD section that never referenced by module.
# An example is PCD for signature usage.
- #
+ #
for DscPcd in PlatformPcds:
DscPcdEntry = self._PlatformPcds[DscPcd]
if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
@@ -1659,8 +1659,8 @@ class PlatformAutoGen(AutoGen):
defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))
SkuObjList[0], SkuObjList[defaultindex] = SkuObjList[defaultindex], SkuObjList[0]
for (SkuName, Sku) in SkuObjList:
- Sku.VpdOffset = Sku.VpdOffset.strip()
-
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
# Need to iterate DEC pcd information to get the value & datumtype
for eachDec in self.PackageList:
for DecPcd in eachDec.Pcds:
@@ -1671,8 +1671,8 @@ class PlatformAutoGen(AutoGen):
EdkLogger.warn("build", "Unreferenced vpd pcd used!",
File=self.MetaFile, \
ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \
- %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
-
+ %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
+
DscPcdEntry.DatumType = DecPcdEntry.DatumType
DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
DscPcdEntry.TokenValue = DecPcdEntry.TokenValue
@@ -1680,7 +1680,7 @@ class PlatformAutoGen(AutoGen):
# Only fix the value while no value provided in DSC file.
if not Sku.DefaultValue:
DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue
-
+
if DscPcdEntry not in self._DynamicPcdList:
self._DynamicPcdList.append(DscPcdEntry)
Sku.VpdOffset = Sku.VpdOffset.strip()
@@ -1711,7 +1711,7 @@ class PlatformAutoGen(AutoGen):
VpdFile.Add(DscPcdEntry, SkuName, Sku.VpdOffset)
SkuValueMap[PcdValue].append(Sku)
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
- NeedProcessVpdMapFile = True
+ NeedProcessVpdMapFile = True
if DscPcdEntry.DatumType == TAB_VOID and PcdValue.startswith("L"):
UnicodePcdArray.add(DscPcdEntry)
elif len(Sku.VariableName) > 0:
@@ -1723,7 +1723,7 @@ class PlatformAutoGen(AutoGen):
VpdSkuMap[DscPcd] = SkuValueMap
if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \
VpdFile.GetCount() != 0:
- EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
"Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
if VpdFile.GetCount() != 0:
@@ -2071,9 +2071,9 @@ class PlatformAutoGen(AutoGen):
self._PcdTokenNumber = OrderedDict()
TokenNumber = 1
#
- # Make the Dynamic and DynamicEx PCD use within different TokenNumber area.
+ # Make the Dynamic and DynamicEx PCD use within different TokenNumber area.
# Such as:
- #
+ #
# Dynamic PCD:
# TokenNumber 0 ~ 10
# DynamicEx PCD:
@@ -2388,7 +2388,7 @@ class PlatformAutoGen(AutoGen):
# @param Options Options to be expanded
#
# @retval options Options expanded
- #
+ #
def _ExpandBuildOption(self, Options, ModuleStyle=None):
BuildOptions = {}
FamilyMatch = False
@@ -2414,9 +2414,9 @@ class PlatformAutoGen(AutoGen):
if OverrideList.get(Key[1]) is not None:
OverrideList.pop(Key[1])
OverrideList[Key[1]] = Options[Key]
-
+
#
- # Use the highest priority value.
+ # Use the highest priority value.
#
if (len(OverrideList) >= 2):
KeyList = OverrideList.keys()
@@ -2427,7 +2427,7 @@ class PlatformAutoGen(AutoGen):
NextKey = KeyList[Index1 + Index + 1]
#
# Compare two Key, if one is included by another, choose the higher priority one
- #
+ #
Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
if (Target1 == Target2 or Target1 == "*" or Target2 == "*") and\
(ToolChain1 == ToolChain2 or ToolChain1 == "*" or ToolChain2 == "*") and\
@@ -2441,7 +2441,7 @@ class PlatformAutoGen(AutoGen):
else:
if Options.get((self.BuildRuleFamily, NowKey)) is not None:
Options.pop((self.BuildRuleFamily, NowKey))
-
+
for Key in Options:
if ModuleStyle is not None and len (Key) > 2:
# Check Module style is EDK or EDKII.
@@ -2639,7 +2639,7 @@ class ModuleAutoGen(AutoGen):
% (MetaFile, Arch))
return None
return obj
-
+
## Initialize ModuleAutoGen
#
# @param Workspace EdkIIWorkspaceBuild object
@@ -2737,13 +2737,13 @@ class ModuleAutoGen(AutoGen):
self.AutoGenDepSet = set()
-
+
## The Modules referenced to this Library
# Only Library has this attribute
- self._ReferenceModules = []
-
+ self._ReferenceModules = []
+
## Store the FixedAtBuild Pcds
- #
+ #
self._FixedAtBuildPcds = []
self.ConstPcd = {}
@@ -2759,8 +2759,8 @@ class ModuleAutoGen(AutoGen):
continue
if Pcd not in self._FixedAtBuildPcds:
self._FixedAtBuildPcds.append(Pcd)
-
- return self._FixedAtBuildPcds
+
+ return self._FixedAtBuildPcds
def _GetUniqueBaseName(self):
BaseName = self.Name
@@ -2959,7 +2959,7 @@ class ModuleAutoGen(AutoGen):
continue
PackageList.append(Package)
return PackageList
-
+
## Get the depex string
#
# @return : a string contain all depex expresion.
@@ -2988,7 +2988,7 @@ class ModuleAutoGen(AutoGen):
(Arch.upper() == self.Arch.upper() and \
ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
DepexList.append({(Arch, ModuleType): DepexExpr})
-
+
#the type of build module is USER_DEFINED.
if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED:
for Depex in DepexList:
@@ -2999,7 +2999,7 @@ class ModuleAutoGen(AutoGen):
if not DepexStr:
return '[Depex.%s]\n' % self.Arch
return DepexStr
-
+
#the type of build module not is USER_DEFINED.
Count = 0
for Depex in DepexList:
@@ -3019,7 +3019,7 @@ class ModuleAutoGen(AutoGen):
if not DepexStr:
return '[Depex.%s]\n' % self.Arch
return '[Depex.%s]\n# ' % self.Arch + DepexStr
-
+
## Merge dependency expression
#
# @retval list The token list of the dependency expression after parsed
@@ -3155,14 +3155,14 @@ class ModuleAutoGen(AutoGen):
#
self._BuildOptionIncPathList = []
return self._BuildOptionIncPathList
-
+
BuildOptionIncPathList = []
for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
try:
FlagOption = self.BuildOption[Tool]['FLAGS']
except KeyError:
FlagOption = ''
-
+
if self.PlatformInfo.ToolChainFamily != 'RVCT':
IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
else:
@@ -3175,7 +3175,7 @@ class ModuleAutoGen(AutoGen):
IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)
#
- # EDK II modules must not reference header files outside of the packages they depend on or
+ # EDK II modules must not reference header files outside of the packages they depend on or
# within the module's directory tree. Report error if violation.
#
if self.AutoGenVersion >= 0x00010005:
@@ -3187,13 +3187,13 @@ class ModuleAutoGen(AutoGen):
ExtraData=ErrMsg,
File=str(self.MetaFile))
-
+
BuildOptionIncPathList += IncPathList
-
+
self._BuildOptionIncPathList = BuildOptionIncPathList
-
+
return self._BuildOptionIncPathList
-
+
## Return a list of files which can be built from source
#
# What kind of files can be built is determined by build rules in
@@ -3247,7 +3247,7 @@ class ModuleAutoGen(AutoGen):
Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
for Ext in Order_Dict[F][1:]:
RemoveList.append(F + Ext)
-
+
for item in RemoveList:
FileList.remove(item)
@@ -3690,12 +3690,12 @@ class ModuleAutoGen(AutoGen):
for SourceFile in self.Module.Sources:
if SourceFile.Type.upper() == ".VFR" :
#
- # search the .map file to find the offset of vfr binary in the PE32+/TE file.
+ # search the .map file to find the offset of vfr binary in the PE32+/TE file.
#
VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
elif SourceFile.Type.upper() == ".UNI" :
#
- # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
+ # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
#
VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")
@@ -3727,7 +3727,7 @@ class ModuleAutoGen(AutoGen):
#
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
- fStringIO.write(''.join(UniGuid))
+ fStringIO.write(''.join(UniGuid))
UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue)
else:
@@ -3738,13 +3738,13 @@ class ModuleAutoGen(AutoGen):
#
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
- fStringIO.write(''.join(VfrGuid))
+ fStringIO.write(''.join(VfrGuid))
VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue)
#
# write data into file.
#
- try :
+ try :
fInputfile.write (fStringIO.getvalue())
except:
EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
@@ -3764,15 +3764,15 @@ class ModuleAutoGen(AutoGen):
if self.IsAsBuiltInfCreated:
return
-
+
# Skip the following code for EDK I inf
if self.AutoGenVersion < 0x00010005:
return
-
+
# Skip the following code for libraries
if self.IsLibrary:
return
-
+
# Skip the following code for modules with no source files
if not self.SourceFileList:
return
@@ -3780,7 +3780,7 @@ class ModuleAutoGen(AutoGen):
# Skip the following code for modules without any binary files
if self.BinaryFileList:
return
-
+
### TODO: How to handles mixed source and binary modules
# Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
@@ -4054,7 +4054,7 @@ class ModuleAutoGen(AutoGen):
UsageIndex = Index
break
if UsageIndex != -1:
- PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
+ PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
else:
PcdCommentList.append('## UNDEFINED ' + HiiInfo)
PcdComments = '\n '.join(PcdCommentList)
@@ -4069,7 +4069,7 @@ class ModuleAutoGen(AutoGen):
# Generated LibraryClasses section in comments.
for Library in self.LibraryAutoGenList:
AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))
-
+
# Generated UserExtensions TianoCore section.
# All tianocore user extensions are copied.
UserExtStr = ''
@@ -4083,12 +4083,12 @@ class ModuleAutoGen(AutoGen):
# Generated depex expression section in comments.
DepexExpresion = self._GetDepexExpresionString()
AsBuiltInfDict['depexsection_item'] = DepexExpresion if DepexExpresion else ''
-
+
AsBuiltInf = TemplateString()
AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))
-
+
SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)
-
+
self.IsAsBuiltInfCreated = True
if GlobalData.gBinCacheDest:
self.CopyModuleToCache()
@@ -4408,7 +4408,7 @@ class ModuleAutoGen(AutoGen):
BuildOption = property(_GetModuleBuildOption)
BuildOptionIncPathList = property(_GetBuildOptionIncPathList)
BuildCommand = property(_GetBuildCommand)
-
+
FixedAtBuildPcds = property(_GetFixedAtBuildPcds)
UniqueBaseName = property(_GetUniqueBaseName)
diff --git a/BaseTools/Source/Python/AutoGen/BuildEngine.py b/BaseTools/Source/Python/AutoGen/BuildEngine.py
index e205589c6b..8a32343846 100644
--- a/BaseTools/Source/Python/AutoGen/BuildEngine.py
+++ b/BaseTools/Source/Python/AutoGen/BuildEngine.py
@@ -360,7 +360,7 @@ class BuildRule:
# Clean up the line and replace path separator with native one
Line = self.RuleContent[Index].strip().replace(self._PATH_SEP, os.path.sep)
self.RuleContent[Index] = Line
-
+
# find the build_rule_version
if Line and Line[0] == "#" and Line.find(TAB_BUILD_RULE_VERSION) != -1:
if Line.find("=") != -1 and Line.find("=") < (len(Line) - 1) and (Line[(Line.find("=") + 1):]).split():
diff --git a/BaseTools/Source/Python/AutoGen/GenC.py b/BaseTools/Source/Python/AutoGen/GenC.py
index 5c3552a773..eac41ed9bf 100644
--- a/BaseTools/Source/Python/AutoGen/GenC.py
+++ b/BaseTools/Source/Python/AutoGen/GenC.py
@@ -784,7 +784,7 @@ gModuleTypeHeaderFile = {
SUP_MODULE_USER_DEFINED : [gBasicHeaderFile]
}
-## Autogen internal worker macro to define DynamicEx PCD name includes both the TokenSpaceGuidName
+## Autogen internal worker macro to define DynamicEx PCD name includes both the TokenSpaceGuidName
# the TokenName and Guid comparison to avoid define name collisions.
#
# @param Info The ModuleAutoGen object
@@ -804,7 +804,7 @@ def DynExPcdTokenNumberMapping(Info, AutoGenH):
return
AutoGenH.Append('\n#define COMPAREGUID(Guid1, Guid2) (BOOLEAN)(*(CONST UINT64*)Guid1 == *(CONST UINT64*)Guid2 && *((CONST UINT64*)Guid1 + 1) == *((CONST UINT64*)Guid2 + 1))\n')
# AutoGen for each PCD listed in a [PcdEx] section of a Module/Lib INF file.
- # Auto generate a macro for each TokenName that takes a Guid pointer as a parameter.
+ # Auto generate a macro for each TokenName that takes a Guid pointer as a parameter.
# Use the Guid pointer to see if it matches any of the token space GUIDs.
TokenCNameList = set()
for TokenCName in ExTokenCNameList:
@@ -822,15 +822,15 @@ def DynExPcdTokenNumberMapping(Info, AutoGenH):
Index = Index + 1
if Index == 1:
AutoGenH.Append('\n#define __PCD_%s_ADDR_CMP(GuidPtr) (' % (RealTokenCName))
- AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
+ AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
% (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
else:
- AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
+ AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
% (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
if Index == Count:
AutoGenH.Append('0 \\\n )\n')
TokenCNameList.add(TokenCName)
-
+
TokenCNameList = set()
for TokenCName in ExTokenCNameList:
if TokenCName in TokenCNameList:
@@ -848,14 +848,14 @@ def DynExPcdTokenNumberMapping(Info, AutoGenH):
if Index == 1:
AutoGenH.Append('\n#define __PCD_%s_VAL_CMP(GuidPtr) (' % (RealTokenCName))
AutoGenH.Append('\\\n (GuidPtr == NULL) ? 0:')
- AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
+ AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
% (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
else:
- AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
+ AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
% (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
if Index == Count:
AutoGenH.Append('0 \\\n )\n')
- # Autogen internal worker macro to compare GUIDs. Guid1 is a pointer to a GUID.
+ # Autogen internal worker macro to compare GUIDs. Guid1 is a pointer to a GUID.
# Guid2 is a C name for a GUID. Compare pointers first because optimizing compiler
# can do this at build time on CONST GUID pointers and optimize away call to COMPAREGUID().
# COMPAREGUID() will only be used if the Guid passed in is local to the module.
@@ -890,22 +890,22 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
if Pcd.PcdValueFromComm:
Pcd.DefaultValue = Pcd.PcdValueFromComm
-
+
if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
TokenNumber = int(Pcd.TokenValue, 0)
- # Add TokenSpaceGuidValue value to PcdTokenName to discriminate the DynamicEx PCDs with
+ # Add TokenSpaceGuidValue value to PcdTokenName to discriminate the DynamicEx PCDs with
# different Guids but same TokenCName
PcdExTokenName = '_PCD_TOKEN_' + Pcd.TokenSpaceGuidCName + '_' + TokenCName
AutoGenH.Append('\n#define %s %dU\n' % (PcdExTokenName, TokenNumber))
else:
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
- # If one of the Source built modules listed in the DSC is not listed in FDF modules,
- # and the INF lists a PCD can only use the PcdsDynamic access method (it is only
- # listed in the DEC file that declares the PCD as PcdsDynamic), then build tool will
- # report warning message notify the PI that they are attempting to build a module
- # that must be included in a flash image in order to be functional. These Dynamic PCD
- # will not be added into the Database unless it is used by other modules that are
- # included in the FDF file.
+ # If one of the Source built modules listed in the DSC is not listed in FDF modules,
+ # and the INF lists a PCD can only use the PcdsDynamic access method (it is only
+ # listed in the DEC file that declares the PCD as PcdsDynamic), then build tool will
+ # report warning message notify the PI that they are attempting to build a module
+ # that must be included in a flash image in order to be functional. These Dynamic PCD
+ # will not be added into the Database unless it is used by other modules that are
+ # included in the FDF file.
# In this case, just assign an invalid token number to make it pass build.
if Pcd.Type in PCD_DYNAMIC_TYPE_SET:
TokenNumber = 0
@@ -929,7 +929,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
SetModeName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
SetModeStatusName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_S_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_S_' + TokenCName
GetModeSizeName = '_PCD_GET_MODE_SIZE' + '_' + TokenCName
-
+
if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
if Info.IsLibrary:
PcdList = Info.LibraryPcdList
@@ -1044,7 +1044,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
"Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
ExtraData="[%s]" % str(Info))
if not Value.endswith('U'):
- Value += 'U'
+ Value += 'U'
elif Pcd.DatumType == TAB_UINT8:
if ValueNumber < 0:
EdkLogger.error("build", AUTOGEN_ERROR,
@@ -1102,7 +1102,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
PcdValueName = '_PCD_PATCHABLE_VALUE_' + TokenCName
else:
PcdValueName = '_PCD_VALUE_' + TokenCName
-
+
if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
#
# For unicode, UINT16 array will be generated, so the alignment of unicode is guaranteed.
@@ -1115,7 +1115,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT8 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value))
AutoGenH.Append('extern %s UINT8 %s%s;\n' %(Const, PcdVariableName, Array))
AutoGenH.Append('#define %s %s%s\n' %(GetModeName, Type, PcdVariableName))
-
+
PcdDataSize = Pcd.GetPcdSize()
if Pcd.Type == TAB_PCDS_FIXED_AT_BUILD:
AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
@@ -1132,10 +1132,10 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
AutoGenC.Append('volatile %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
AutoGenH.Append('extern volatile %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName))
-
+
PcdDataSize = Pcd.GetPcdSize()
AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PcdDataSize))
-
+
AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, PatchPcdSizeVariableName))
AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName, PcdDataSize))
@@ -1143,7 +1143,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
PcdDataSize = Pcd.GetPcdSize()
AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, FixPcdSizeTokenName))
-
+
AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
AutoGenH.Append('extern %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
@@ -1190,13 +1190,13 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
TokenNumber = int(Pcd.TokenValue, 0)
else:
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
- # If one of the Source built modules listed in the DSC is not listed in FDF modules,
- # and the INF lists a PCD can only use the PcdsDynamic access method (it is only
- # listed in the DEC file that declares the PCD as PcdsDynamic), then build tool will
- # report warning message notify the PI that they are attempting to build a module
- # that must be included in a flash image in order to be functional. These Dynamic PCD
- # will not be added into the Database unless it is used by other modules that are
- # included in the FDF file.
+ # If one of the Source built modules listed in the DSC is not listed in FDF modules,
+ # and the INF lists a PCD can only use the PcdsDynamic access method (it is only
+ # listed in the DEC file that declares the PCD as PcdsDynamic), then build tool will
+ # report warning message notify the PI that they are attempting to build a module
+ # that must be included in a flash image in order to be functional. These Dynamic PCD
+ # will not be added into the Database unless it is used by other modules that are
+ # included in the FDF file.
# In this case, just assign an invalid token number to make it pass build.
if Pcd.Type in PCD_DYNAMIC_TYPE_SET:
TokenNumber = 0
@@ -1230,7 +1230,7 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
if PcdItemType in PCD_DYNAMIC_EX_TYPE_SET:
PcdExTokenName = '_PCD_TOKEN_' + TokenSpaceGuidCName + '_' + TokenCName
AutoGenH.Append('\n#define %s %dU\n' % (PcdExTokenName, TokenNumber))
-
+
if Info.IsLibrary:
PcdList = Info.LibraryPcdList
else:
@@ -1312,7 +1312,7 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, PatchPcdSizeVariableName))
AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
-
+
if PcdItemType == TAB_PCDS_FIXED_AT_BUILD or PcdItemType == TAB_PCDS_FEATURE_FLAG:
key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + TokenCName
@@ -1323,7 +1323,7 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
AutoGenH.Append('extern const %s _gPcd_FixedAtBuild_%s%s;\n' %(DatumType, TokenCName, Array))
AutoGenH.Append('#define %s %s_gPcd_FixedAtBuild_%s\n' %(GetModeName, Type, TokenCName))
AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName)
-
+
ConstFixedPcd = False
if PcdItemType == TAB_PCDS_FIXED_AT_BUILD and (key in Info.ConstPcd or (Info.IsLibrary and not Info._ReferenceModules)):
ConstFixedPcd = True
@@ -1656,7 +1656,7 @@ def CreatePcdCode(Info, AutoGenC, AutoGenH):
for Pcd in Info.ModulePcdList:
if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenSpaceGuidCName not in TokenSpaceList:
TokenSpaceList.append(Pcd.TokenSpaceGuidCName)
-
+
SkuMgr = Info.Workspace.Platform.SkuIdMgr
AutoGenH.Append("\n// Definition of SkuId Array\n")
AutoGenH.Append("extern UINT64 _gPcd_SkuId_Array[];\n")
@@ -1666,7 +1666,7 @@ def CreatePcdCode(Info, AutoGenC, AutoGenH):
if Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_BASE]:
GuidType = TAB_GUID
else:
- GuidType = "EFI_GUID"
+ GuidType = "EFI_GUID"
for Item in TokenSpaceList:
AutoGenH.Append('extern %s %s;\n' % (GuidType, Item))
@@ -2016,7 +2016,7 @@ def CreateHeaderCode(Info, AutoGenC, AutoGenH):
if Info.ModuleType in gModuleTypeHeaderFile:
AutoGenH.Append("#include <%s>\n" % gModuleTypeHeaderFile[Info.ModuleType][0])
#
- # if either PcdLib in [LibraryClasses] sections or there exist Pcd section, add PcdLib.h
+ # if either PcdLib in [LibraryClasses] sections or there exist Pcd section, add PcdLib.h
# As if modules only uses FixedPcd, then PcdLib is not needed in [LibraryClasses] section.
#
if 'PcdLib' in Info.Module.LibraryClasses or Info.Module.Pcds:
diff --git a/BaseTools/Source/Python/AutoGen/GenPcdDb.py b/BaseTools/Source/Python/AutoGen/GenPcdDb.py
index 9fcd7fcc97..c90b814e7d 100644
--- a/BaseTools/Source/Python/AutoGen/GenPcdDb.py
+++ b/BaseTools/Source/Python/AutoGen/GenPcdDb.py
@@ -182,10 +182,10 @@ typedef struct {
//UINT32 UninitDataBaseSize;// Total size for PCD those default value with 0.
//TABLE_OFFSET LocalTokenNumberTableOffset;
//TABLE_OFFSET ExMapTableOffset;
- //TABLE_OFFSET GuidTableOffset;
+ //TABLE_OFFSET GuidTableOffset;
//TABLE_OFFSET StringTableOffset;
//TABLE_OFFSET SizeTableOffset;
- //TABLE_OFFSET SkuIdTableOffset;
+ //TABLE_OFFSET SkuIdTableOffset;
//TABLE_OFFSET PcdNameTableOffset;
//UINT16 LocalTokenCount; // LOCAL_TOKEN_NUMBER for all
//UINT16 ExTokenCount; // EX_TOKEN_NUMBER for DynamicEx
@@ -237,11 +237,11 @@ ${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = {
## DbItemList
#
-# The class holds the Pcd database items. ItemSize if not zero should match the item datum type in the C structure.
+# The class holds the Pcd database items. ItemSize if not zero should match the item datum type in the C structure.
# When the structure is changed, remember to check the ItemSize and the related PackStr in PackData()
-# RawDataList is the RawData that may need some kind of calculation or transformation,
+# RawDataList is the RawData that may need some kind of calculation or transformation,
# the DataList corresponds to the data that need to be written to database. If DataList is not present, then RawDataList
-# will be written to the database.
+# will be written to the database.
#
class DbItemList:
def __init__(self, ItemSize, DataList=None, RawDataList=None):
@@ -309,7 +309,7 @@ class DbItemList:
## DbExMapTblItemList
#
-# The class holds the ExMap table
+# The class holds the ExMap table
#
class DbExMapTblItemList (DbItemList):
def __init__(self, ItemSize, DataList=None, RawDataList=None):
@@ -319,15 +319,15 @@ class DbExMapTblItemList (DbItemList):
Buffer = ''
PackStr = "=LHH"
for Datas in self.RawDataList:
- Buffer += pack(PackStr,
+ Buffer += pack(PackStr,
GetIntegerValue(Datas[0]),
GetIntegerValue(Datas[1]),
- GetIntegerValue(Datas[2]))
+ GetIntegerValue(Datas[2]))
return Buffer
## DbComItemList
#
-# The DbComItemList is a special kind of DbItemList in case that the size of the List can not be computed by the
+# The DbComItemList is a special kind of DbItemList in case that the size of the List can not be computed by the
# ItemSize multiply the ItemCount.
#
class DbComItemList (DbItemList):
@@ -345,7 +345,7 @@ class DbComItemList (DbItemList):
else:
assert(Index < len(self.RawDataList))
for ItemIndex in xrange(Index):
- Offset += len(self.RawDataList[ItemIndex]) * self.ItemSize
+ Offset += len(self.RawDataList[ItemIndex]) * self.ItemSize
return Offset
@@ -373,12 +373,12 @@ class DbComItemList (DbItemList):
Buffer += pack(PackStr, GetIntegerValue(SingleData))
else:
Buffer += pack(PackStr, GetIntegerValue(Data))
-
+
return Buffer
## DbVariableTableItemList
#
-# The class holds the Variable header value table
+# The class holds the Variable header value table
#
class DbVariableTableItemList (DbComItemList):
def __init__(self, ItemSize, DataList=None, RawDataList=None):
@@ -389,7 +389,7 @@ class DbVariableTableItemList (DbComItemList):
Buffer = ''
for DataList in self.RawDataList:
for Data in DataList:
- Buffer += pack(PackStr,
+ Buffer += pack(PackStr,
GetIntegerValue(Data[0]),
GetIntegerValue(Data[1]),
GetIntegerValue(Data[2]),
@@ -402,7 +402,7 @@ class DbVariableTableItemList (DbComItemList):
class DbStringHeadTableItemList(DbItemList):
def __init__(self,ItemSize,DataList=None,RawDataList=None):
DbItemList.__init__(self, ItemSize, DataList, RawDataList)
-
+
def GetInterOffset(self, Index):
Offset = 0
if self.ItemSize == 0:
@@ -435,11 +435,11 @@ class DbStringHeadTableItemList(DbItemList):
self.ListSize += len(Datas) * self.ItemSize
else:
self.ListSize += self.ItemSize
- return self.ListSize
+ return self.ListSize
## DbSkuHeadTableItemList
#
-# The class holds the Sku header value table
+# The class holds the Sku header value table
#
class DbSkuHeadTableItemList (DbItemList):
def __init__(self, ItemSize, DataList=None, RawDataList=None):
@@ -449,14 +449,14 @@ class DbSkuHeadTableItemList (DbItemList):
PackStr = "=LL"
Buffer = ''
for Data in self.RawDataList:
- Buffer += pack(PackStr,
+ Buffer += pack(PackStr,
GetIntegerValue(Data[0]),
GetIntegerValue(Data[1]))
return Buffer
## DbSizeTableItemList
#
-# The class holds the size table
+# The class holds the size table
#
class DbSizeTableItemList (DbItemList):
def __init__(self, ItemSize, DataList=None, RawDataList=None):
@@ -471,16 +471,16 @@ class DbSizeTableItemList (DbItemList):
PackStr = "=H"
Buffer = ''
for Data in self.RawDataList:
- Buffer += pack(PackStr,
+ Buffer += pack(PackStr,
GetIntegerValue(Data[0]))
for subData in Data[1]:
- Buffer += pack(PackStr,
+ Buffer += pack(PackStr,
GetIntegerValue(subData))
return Buffer
## DbStringItemList
#
-# The class holds the string table
+# The class holds the string table
#
class DbStringItemList (DbComItemList):
def __init__(self, ItemSize, DataList=None, RawDataList=None, LenList=None):
@@ -490,7 +490,7 @@ class DbStringItemList (DbComItemList):
RawDataList = []
if LenList is None:
LenList = []
-
+
assert(len(RawDataList) == len(LenList))
DataList = []
# adjust DataList according to the LenList
@@ -549,7 +549,7 @@ def GetMatchedIndex(Key1, List1, Key2, List2):
return Index
else:
StartPos = Index + 1
-
+
return -1
@@ -557,7 +557,7 @@ def GetMatchedIndex(Key1, List1, Key2, List2):
# to List like [0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00]
#
# @param StringArray A string array like {0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00}
-#
+#
# @retval A list object of integer items
#
def StringArrayToList(StringArray):
@@ -569,7 +569,7 @@ def StringArrayToList(StringArray):
## Convert TokenType String like "PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII" to TokenType value
#
# @param TokenType A TokenType string like "PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII"
-#
+#
# @retval A integer representation of the TokenType
#
def GetTokenTypeValue(TokenType):
@@ -596,7 +596,7 @@ def GetTokenTypeValue(TokenType):
## construct the external Pcd database using data from Dict
#
# @param Dict A dictionary contains Pcd related tables
-#
+#
# @retval Buffer A byte stream of the Pcd database
#
def BuildExDataBase(Dict):
@@ -625,26 +625,26 @@ def BuildExDataBase(Dict):
NumberOfSkuEnabledPcd = GetIntegerValue(Dict['SKU_HEAD_SIZE'])
Dict['STRING_TABLE_DB_VALUE'] = [StringArrayToList(x) for x in Dict['STRING_TABLE_VALUE']]
-
+
StringTableValue = Dict['STRING_TABLE_DB_VALUE']
# when calcute the offset, should use StringTableLen instead of StringTableValue, as string maxium len may be different with actual len
StringTableLen = Dict['STRING_TABLE_LENGTH']
DbStringTableLen = DbStringItemList(0, RawDataList = StringTableValue, LenList = StringTableLen)
-
+
PcdTokenTable = Dict['PCD_TOKENSPACE']
PcdTokenLen = Dict['PCD_TOKENSPACE_LENGTH']
PcdTokenTableValue = [StringArrayToList(x) for x in Dict['PCD_TOKENSPACE']]
DbPcdTokenTable = DbStringItemList(0, RawDataList = PcdTokenTableValue, LenList = PcdTokenLen)
-
+
PcdCNameTable = Dict['PCD_CNAME']
PcdCNameLen = Dict['PCD_CNAME_LENGTH']
PcdCNameTableValue = [StringArrayToList(x) for x in Dict['PCD_CNAME']]
DbPcdCNameTable = DbStringItemList(0, RawDataList = PcdCNameTableValue, LenList = PcdCNameLen)
-
+
PcdNameOffsetTable = Dict['PCD_NAME_OFFSET']
DbPcdNameOffsetTable = DbItemList(4, RawDataList = PcdNameOffsetTable)
-
+
SizeTableValue = zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH'])
DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
InitValueUint16 = Dict['INIT_DB_VALUE_UINT16']
@@ -663,7 +663,7 @@ def BuildExDataBase(Dict):
DbSkuidValue = DbItemList(8, RawDataList = SkuidValue)
-
+
# Unit Db Items
UnInitValueUint64 = Dict['UNINIT_GUID_DECL_UINT64']
DbUnInitValueUint64 = DbItemList(8, RawDataList = UnInitValueUint64)
@@ -676,12 +676,12 @@ def BuildExDataBase(Dict):
UnInitValueBoolean = Dict['UNINIT_GUID_DECL_BOOLEAN']
DbUnInitValueBoolean = DbItemList(1, RawDataList = UnInitValueBoolean)
PcdTokenNumberMap = Dict['PCD_ORDER_TOKEN_NUMBER_MAP']
-
+
DbNameTotle = ["SkuidValue", "InitValueUint64", "VardefValueUint64", "InitValueUint32", "VardefValueUint32", "VpdHeadValue", "ExMapTable",
"LocalTokenNumberTable", "GuidTable", "StringHeadValue", "PcdNameOffsetTable", "VariableTable", "StringTableLen", "PcdTokenTable", "PcdCNameTable",
"SizeTableValue", "InitValueUint16", "VardefValueUint16", "InitValueUint8", "VardefValueUint8", "InitValueBoolean",
"VardefValueBoolean", "UnInitValueUint64", "UnInitValueUint32", "UnInitValueUint16", "UnInitValueUint8", "UnInitValueBoolean"]
-
+
DbTotal = [SkuidValue, InitValueUint64, VardefValueUint64, InitValueUint32, VardefValueUint32, VpdHeadValue, ExMapTable,
LocalTokenNumberTable, GuidTable, StringHeadValue, PcdNameOffsetTable, VariableTable, StringTableLen, PcdTokenTable, PcdCNameTable,
SizeTableValue, InitValueUint16, VardefValueUint16, InitValueUint8, VardefValueUint8, InitValueBoolean,
@@ -690,21 +690,21 @@ def BuildExDataBase(Dict):
DbLocalTokenNumberTable, DbGuidTable, DbStringHeadValue, DbPcdNameOffsetTable, DbVariableTable, DbStringTableLen, DbPcdTokenTable, DbPcdCNameTable,
DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16, DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
DbVardefValueBoolean, DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean]
-
+
# VardefValueBoolean is the last table in the init table items
InitTableNum = DbNameTotle.index("VardefValueBoolean") + 1
# The FixedHeader length of the PCD_DATABASE_INIT, from Signature to Pad
FixedHeaderLen = 80
- # Get offset of SkuId table in the database
+ # Get offset of SkuId table in the database
SkuIdTableOffset = FixedHeaderLen
for DbIndex in xrange(len(DbTotal)):
if DbTotal[DbIndex] is SkuidValue:
break
SkuIdTableOffset += DbItemTotal[DbIndex].GetListSize()
-
-
- # Get offset of SkuValue table in the database
+
+
+ # Get offset of SkuValue table in the database
# Fix up the LocalTokenNumberTable, SkuHeader table
for (LocalTokenNumberTableIndex, (Offset, Table)) in enumerate(LocalTokenNumberTable):
@@ -725,11 +725,11 @@ def BuildExDataBase(Dict):
TokenTypeValue = GetTokenTypeValue(TokenTypeValue)
LocalTokenNumberTable[LocalTokenNumberTableIndex] = DbOffset|int(TokenTypeValue)
# if PCD_TYPE_SKU_ENABLED, then we need to fix up the SkuTable
-
-
-
- # resolve variable table offset
+
+
+
+ # resolve variable table offset
for VariableEntries in VariableTable:
skuindex = 0
for VariableEntryPerSku in VariableEntries:
@@ -747,7 +747,7 @@ def BuildExDataBase(Dict):
else:
assert(False)
if isinstance(VariableRefTable[0], list):
- DbOffset += skuindex * 4
+ DbOffset += skuindex * 4
skuindex += 1
if DbIndex >= InitTableNum:
assert(False)
@@ -775,17 +775,17 @@ def BuildExDataBase(Dict):
DbTotalLength += DbItemTotal[DbIndex].GetListSize()
if not Dict['PCD_INFO_FLAG']:
- DbPcdNameOffset = 0
+ DbPcdNameOffset = 0
LocalTokenCount = GetIntegerValue(Dict['LOCAL_TOKEN_NUMBER'])
ExTokenCount = GetIntegerValue(Dict['EX_TOKEN_NUMBER'])
GuidTableCount = GetIntegerValue(Dict['GUID_TABLE_SIZE'])
SystemSkuId = GetIntegerValue(Dict['SYSTEM_SKU_ID_VALUE'])
Pad = 0xDA
-
+
UninitDataBaseSize = 0
for Item in (DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean):
UninitDataBaseSize += Item.GetListSize()
-
+
if (DbTotalLength - UninitDataBaseSize) % 8:
DbTotalLength += (8 - (DbTotalLength - UninitDataBaseSize) % 8)
# Construct the database buffer
@@ -812,7 +812,7 @@ def BuildExDataBase(Dict):
Buffer += b
b = pack('=L', ExMapTableOffset)
-
+
Buffer += b
b = pack('=L', GuidTableOffset)
@@ -836,7 +836,7 @@ def BuildExDataBase(Dict):
Buffer += b
b = pack('=H', GuidTableCount)
-
+
Buffer += b
b = pack('=B', Pad)
Buffer += b
@@ -845,18 +845,18 @@ def BuildExDataBase(Dict):
Buffer += b
Buffer += b
Buffer += b
-
+
Index = 0
for Item in DbItemTotal:
Index +=1
b = Item.PackData()
- Buffer += b
+ Buffer += b
if Index == InitTableNum:
if len(Buffer) % 8:
for num in range(8 - len(Buffer) % 8):
b = pack('=B', Pad)
Buffer += b
- break
+ break
return Buffer
## Create code for PCD database
@@ -1010,7 +1010,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
'SYSTEM_SKU_ID' : ' SKU_ID SystemSkuId;',
'SYSTEM_SKU_ID_VALUE' : '0U'
}
-
+
SkuObj = Platform.Platform.SkuIdMgr
Dict['SYSTEM_SKU_ID_VALUE'] = 0 if SkuObj.SkuUsageType == SkuObj.SINGLE else Platform.Platform.SkuIds[SkuObj.SystemSkuId][0]
@@ -1028,7 +1028,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict[Init+'_NUMSKUS_DECL_' + DatumType] = []
Dict[Init+'_VALUE_' + DatumType] = []
Dict[Init+'_DB_VALUE_'+DatumType] = []
-
+
for Type in ['STRING_HEAD', 'VPD_HEAD', 'VARIABLE_HEAD']:
Dict[Type + '_CNAME_DECL'] = []
Dict[Type + '_GUID_DECL'] = []
@@ -1038,7 +1038,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['STRING_DB_VALUE'] = []
Dict['VPD_DB_VALUE'] = []
Dict['VARIABLE_DB_VALUE'] = []
-
+
Dict['STRING_TABLE_INDEX'] = []
Dict['STRING_TABLE_LENGTH'] = []
Dict['STRING_TABLE_CNAME'] = []
@@ -1061,19 +1061,19 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'] = []
Dict['VARIABLE_DB_VALUE'] = []
-
+
Dict['PCD_TOKENSPACE'] = []
- Dict['PCD_CNAME'] = []
+ Dict['PCD_CNAME'] = []
Dict['PCD_TOKENSPACE_LENGTH'] = []
Dict['PCD_CNAME_LENGTH'] = []
Dict['PCD_TOKENSPACE_OFFSET'] = []
Dict['PCD_CNAME_OFFSET'] = []
Dict['PCD_TOKENSPACE_MAP'] = []
Dict['PCD_NAME_OFFSET'] = []
-
+
Dict['PCD_ORDER_TOKEN_NUMBER_MAP'] = {}
PCD_STRING_INDEX_MAP = {}
-
+
StringTableIndex = 0
StringTableSize = 0
NumberOfLocalTokens = 0
@@ -1141,8 +1141,8 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
if len(Pcd.SkuInfoList) > 1:
NumberOfSkuEnabledPcd += 1
-
- SkuIdIndex = 1
+
+ SkuIdIndex = 1
VariableHeadList = []
for SkuName in Pcd.SkuInfoList:
Sku = Pcd.SkuInfoList[SkuName]
@@ -1150,9 +1150,9 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
if SkuId is None or SkuId == '':
continue
-
+
SkuIdIndex += 1
-
+
if len(Sku.VariableName) > 0:
VariableGuidStructure = Sku.VariableGuidValue
VariableGuid = GuidStructureStringToGuidValueName(VariableGuidStructure)
@@ -1203,7 +1203,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)):
VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index]
VariableHeadList.append(VariableHeadStringIndex)
-
+
VariableHeadStringIndex = VariableHeadList[SkuIdIndex - 2]
# store VariableGuid to GuidTable and get the VariableHeadGuidIndex
@@ -1214,11 +1214,11 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
VariableHeadValueList.append('%dU, offsetof(%s_PCD_DATABASE, Init.%s_%s), %dU, %sU' %
- (VariableHeadStringIndex, Phase, CName, TokenSpaceGuid,
+ (VariableHeadStringIndex, Phase, CName, TokenSpaceGuid,
VariableHeadGuidIndex, Sku.VariableOffset))
else:
VariableHeadValueList.append('%dU, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s), %dU, %sU' %
- (VariableHeadStringIndex, Phase, CName, TokenSpaceGuid, SkuIdIndex,
+ (VariableHeadStringIndex, Phase, CName, TokenSpaceGuid, SkuIdIndex,
VariableHeadGuidIndex, Sku.VariableOffset))
Dict['VARDEF_CNAME_'+Pcd.DatumType].append(CName)
Dict['VARDEF_GUID_'+Pcd.DatumType].append(TokenSpaceGuid)
@@ -1231,7 +1231,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
# warning under linux building environment.
#
Dict['VARDEF_DB_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
-
+
if Pcd.DatumType == TAB_UINT64:
Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "ULL")
elif Pcd.DatumType in (TAB_UINT32, TAB_UINT16, TAB_UINT8):
@@ -1264,13 +1264,13 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Pcd.InitString = 'INIT'
VpdHeadOffsetList.append(str(Sku.VpdOffset) + 'U')
VpdDbOffsetList.append(Sku.VpdOffset)
- # Also add the VOID* string of VPD PCD to SizeTable
+ # Also add the VOID* string of VPD PCD to SizeTable
if Pcd.DatumType == TAB_VOID:
NumberOfSizeItems += 1
# For VPD type of PCD, its current size is equal to its MAX size.
- VoidStarTypeCurrSize = [str(Pcd.MaxDatumSize) + 'U']
+ VoidStarTypeCurrSize = [str(Pcd.MaxDatumSize) + 'U']
continue
-
+
if Pcd.DatumType == TAB_VOID:
Pcd.TokenTypeList.append('PCD_TYPE_STRING')
Pcd.InitString = 'INIT'
@@ -1297,7 +1297,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
Size = len(Sku.DefaultValue.split(","))
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
-
+
StringHeadOffsetList.append(str(StringTableSize) + 'U')
StringDbOffsetList.append(StringTableSize)
if Pcd.MaxDatumSize != '':
@@ -1336,10 +1336,10 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
ValueList.append(Sku.DefaultValue + "U")
elif Pcd.DatumType == "BOOLEAN":
if Sku.DefaultValue in ["1", "0"]:
- ValueList.append(Sku.DefaultValue + "U")
+ ValueList.append(Sku.DefaultValue + "U")
else:
ValueList.append(Sku.DefaultValue)
-
+
DbValueList.append(Sku.DefaultValue)
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
@@ -1348,8 +1348,8 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
Dict['SIZE_TABLE_CURRENT_LENGTH'].append(VoidStarTypeCurrSize)
-
-
+
+
if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
Dict['VARIABLE_HEAD_CNAME_DECL'].append(CName)
@@ -1382,7 +1382,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
else:
Dict[Pcd.InitString+'_VALUE_'+Pcd.DatumType].append(', '.join(ValueList))
Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType].append(DbValueList)
-
+
if Phase == 'PEI':
NumberOfLocalTokens = NumberOfPeiLocalTokens
if Phase == 'DXE':
@@ -1394,7 +1394,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['TOKEN_TYPE'] = ['' for x in range(NumberOfLocalTokens)]
Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'] = ['' for x in range(NumberOfLocalTokens)]
Dict['PCD_CNAME'] = ['' for x in range(NumberOfLocalTokens)]
- Dict['PCD_TOKENSPACE_MAP'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['PCD_TOKENSPACE_MAP'] = ['' for x in range(NumberOfLocalTokens)]
Dict['PCD_CNAME_LENGTH'] = [0 for x in range(NumberOfLocalTokens)]
SkuEnablePcdIndex = 0
for Pcd in ReorderedDynPcdList:
@@ -1419,7 +1419,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
EdkLogger.debug(EdkLogger.DEBUG_1, "PCD = %s.%s" % (CName, TokenSpaceGuidCName))
EdkLogger.debug(EdkLogger.DEBUG_1, "phase = %s" % Phase)
EdkLogger.debug(EdkLogger.DEBUG_1, "GeneratedTokenNumber = %s" % str(GeneratedTokenNumber))
-
+
#
# following four Dict items hold the information for LocalTokenNumberTable
#
@@ -1430,7 +1430,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['TOKEN_CNAME'][GeneratedTokenNumber] = CName
Dict['TOKEN_GUID'][GeneratedTokenNumber] = TokenSpaceGuid
Dict['TOKEN_TYPE'][GeneratedTokenNumber] = ' | '.join(Pcd.TokenTypeList)
-
+
if Platform.Platform.PcdInfoFlag:
TokenSpaceGuidCNameArray = StringToArray('"' + TokenSpaceGuidCName + '"' )
if TokenSpaceGuidCNameArray not in Dict['PCD_TOKENSPACE']:
@@ -1439,10 +1439,10 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['PCD_TOKENSPACE_MAP'][GeneratedTokenNumber] = Dict['PCD_TOKENSPACE'].index(TokenSpaceGuidCNameArray)
CNameBinArray = StringToArray('"' + CName + '"' )
Dict['PCD_CNAME'][GeneratedTokenNumber] = CNameBinArray
-
+
Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CNameBinArray.split(","))
-
-
+
+
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
# search the Offset and Table, used by LocalTokenNumberTableOffset
@@ -1468,7 +1468,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
if Pcd.InitString == 'UNINIT':
Table = Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType]
else:
- Table = Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType]
+ Table = Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType]
Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'][GeneratedTokenNumber] = (Offset, Table)
#
@@ -1478,10 +1478,10 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['VARDEF_HEADER'][GeneratedTokenNumber] = '_Variable_Header'
else:
Dict['VARDEF_HEADER'][GeneratedTokenNumber] = ''
-
-
+
+
if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
-
+
if Phase == 'DXE':
GeneratedTokenNumber += NumberOfPeiLocalTokens
#
@@ -1493,7 +1493,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
# Therefore, 1 is added to GeneratedTokenNumber to generate a PCD Token Number before being inserted
# to the EXMAPPING_TABLE.
#
-
+
Dict['EXMAPPING_TABLE_EXTOKEN'].append(str(Pcd.TokenValue) + 'U')
Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(str(GeneratedTokenNumber + 1) + 'U')
@@ -1504,12 +1504,12 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
TokenSpaceIndex = StringTableSize
for i in range(Dict['PCD_TOKENSPACE_MAP'][index]):
TokenSpaceIndex += Dict['PCD_TOKENSPACE_LENGTH'][i]
- Dict['PCD_TOKENSPACE_OFFSET'].append(TokenSpaceIndex)
+ Dict['PCD_TOKENSPACE_OFFSET'].append(TokenSpaceIndex)
for index in range(len(Dict['PCD_TOKENSPACE'])):
StringTableSize += Dict['PCD_TOKENSPACE_LENGTH'][index]
StringTableIndex += 1
for index in range(len(Dict['PCD_CNAME'])):
- Dict['PCD_CNAME_OFFSET'].append(StringTableSize)
+ Dict['PCD_CNAME_OFFSET'].append(StringTableSize)
Dict['PCD_NAME_OFFSET'].append(Dict['PCD_TOKENSPACE_OFFSET'][index])
Dict['PCD_NAME_OFFSET'].append(StringTableSize)
StringTableSize += Dict['PCD_CNAME_LENGTH'][index]
@@ -1552,15 +1552,15 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
if NumberOfSizeItems != 0:
Dict['SIZE_TABLE_SIZE'] = str(NumberOfSizeItems * 2) + 'U'
-
- if NumberOfSkuEnabledPcd != 0:
+
+ if NumberOfSkuEnabledPcd != 0:
Dict['SKU_HEAD_SIZE'] = str(NumberOfSkuEnabledPcd) + 'U'
-
+
for AvailableSkuNumber in SkuObj.SkuIdNumberSet:
if AvailableSkuNumber not in Dict['SKUID_VALUE']:
Dict['SKUID_VALUE'].append(AvailableSkuNumber)
Dict['SKUID_VALUE'][0] = len(Dict['SKUID_VALUE']) - 1
-
+
AutoGenH.Append(gPcdDatabaseAutoGenH.Replace(Dict))
if NumberOfLocalTokens == 0:
AutoGenC.Append(gEmptyPcdDatabaseAutoGenC.Replace(Dict))
@@ -1573,11 +1573,11 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
SizeCurLenTempList = []
SizeMaxLenTempList = []
ReOrderFlag = True
-
+
if len(Dict['SIZE_TABLE_CNAME']) == 1:
if not (Dict['SIZE_TABLE_CNAME'][0] and Dict['SIZE_TABLE_GUID'][0]):
ReOrderFlag = False
-
+
if ReOrderFlag:
for Count in range(len(Dict['TOKEN_CNAME'])):
for Count1 in range(len(Dict['SIZE_TABLE_CNAME'])):
@@ -1587,15 +1587,15 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
SizeGuidTempList.append(Dict['SIZE_TABLE_GUID'][Count1])
SizeCurLenTempList.append(Dict['SIZE_TABLE_CURRENT_LENGTH'][Count1])
SizeMaxLenTempList.append(Dict['SIZE_TABLE_MAXIMUM_LENGTH'][Count1])
-
+
for Count in range(len(Dict['SIZE_TABLE_CNAME'])):
Dict['SIZE_TABLE_CNAME'][Count] = SizeCNameTempList[Count]
Dict['SIZE_TABLE_GUID'][Count] = SizeGuidTempList[Count]
Dict['SIZE_TABLE_CURRENT_LENGTH'][Count] = SizeCurLenTempList[Count]
Dict['SIZE_TABLE_MAXIMUM_LENGTH'][Count] = SizeMaxLenTempList[Count]
-
+
AutoGenC.Append(gPcdDatabaseAutoGenC.Replace(Dict))
-
+
# print Phase
Buffer = BuildExDataBase(Dict)
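The BuildExDataBase() hunks above compute each table offset by summing the preceding items' GetListSize() results, then pad the initialized portion of the PCD database to an 8-byte boundary with the 0xDA pad byte before the uninitialized tables begin. A minimal sketch of that padding step, outside the patch and with hypothetical names:

from struct import pack

PAD_BYTE = 0xDA  # same pad value the generator packs into the header and padding above

def pad_to_8(buffer):
    # Append pad bytes until the buffer length is a multiple of 8,
    # mirroring the alignment applied after the initialized tables.
    remainder = len(buffer) % 8
    if remainder:
        buffer += pack('=B', PAD_BYTE) * (8 - remainder)
    return buffer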
diff --git a/BaseTools/Source/Python/AutoGen/InfSectionParser.py b/BaseTools/Source/Python/AutoGen/InfSectionParser.py
index 2cd5a6667a..d985089738 100644
--- a/BaseTools/Source/Python/AutoGen/InfSectionParser.py
+++ b/BaseTools/Source/Python/AutoGen/InfSectionParser.py
@@ -17,14 +17,14 @@
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import *
from Common.DataType import *
-
+
class InfSectionParser():
def __init__(self, FilePath):
self._FilePath = FilePath
self._FileSectionDataList = []
self._ParserInf()
-
+
def _ParserInf(self):
FileLinesList = []
UserExtFind = False
@@ -32,12 +32,12 @@ class InfSectionParser():
FileLastLine = False
SectionLine = ''
SectionData = []
-
+
try:
FileLinesList = open(self._FilePath, "r", 0).readlines()
except BaseException:
EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % self._FilePath)
-
+
for Index in range(0, len(FileLinesList)):
line = str(FileLinesList[Index]).strip()
if Index + 1 == len(FileLinesList):
@@ -52,7 +52,7 @@ class InfSectionParser():
SectionLine = line
UserExtFind = True
FindEnd = False
-
+
if (NextLine != '' and NextLine[0] == TAB_SECTION_START and \
NextLine[-1] == TAB_SECTION_END) or FileLastLine:
UserExtFind = False
@@ -60,7 +60,7 @@ class InfSectionParser():
self._FileSectionDataList.append({SectionLine: SectionData[:]})
del SectionData[:]
SectionLine = ''
-
+
# Get user extension TianoCore data
#
# @return: a list include some dictionary that key is section and value is a list contain all data.
diff --git a/BaseTools/Source/Python/AutoGen/StrGather.py b/BaseTools/Source/Python/AutoGen/StrGather.py
index 0e74f3bfb7..a702ef0dd5 100644
--- a/BaseTools/Source/Python/AutoGen/StrGather.py
+++ b/BaseTools/Source/Python/AutoGen/StrGather.py
@@ -1,5 +1,5 @@
## @file
-# This file is used to parse a strings file and create or add to a string database
+# This file is used to parse a strings file and create or add to a string database
# file.
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
@@ -144,7 +144,7 @@ def CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
Str = WriteLine(Str, Line)
UnusedStr = ''
- #Group the referred/Unused STRING token together.
+ #Group the referred/Unused STRING token together.
for Index in range(2, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]])):
StringItem = UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]][Index]
Name = StringItem.StringName
@@ -265,16 +265,16 @@ def GetFilteredLanguage(UniLanguageList, LanguageFilterList):
PrimaryTag = Language[0:Language.find('-')].lower()
else:
PrimaryTag = Language
-
+
if len(PrimaryTag) == 3:
PrimaryTag = LangConvTable.get(PrimaryTag)
-
+
for UniLanguage in UniLanguageList:
if UniLanguage.find('-') != -1:
UniLanguagePrimaryTag = UniLanguage[0:UniLanguage.find('-')].lower()
else:
UniLanguagePrimaryTag = UniLanguage
-
+
if len(UniLanguagePrimaryTag) == 3:
UniLanguagePrimaryTag = LangConvTable.get(UniLanguagePrimaryTag)
@@ -307,7 +307,7 @@ def GetFilteredLanguage(UniLanguageList, LanguageFilterList):
# @param UniObjectClass A UniObjectClass instance
# @param IsCompatibleMode Compatible mode
# @param UniBinBuffer UniBinBuffer to contain UniBinary data.
-# @param FilterInfo Platform language filter information
+# @param FilterInfo Platform language filter information
#
# @retval Str: A string of .c file content
#
@@ -325,14 +325,14 @@ def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer,
else:
# EDK module is using ISO639-2 format filter, convert to the RFC4646 format
LanguageFilterList = [LangConvTable.get(F.lower()) for F in FilterInfo[1]]
-
+
UniLanguageList = []
for IndexI in range(len(UniObjectClass.LanguageDef)):
UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]]
UniLanguageListFiltered = GetFilteredLanguage(UniLanguageList, LanguageFilterList)
-
-
+
+
#
# Create lines for each language's strings
#
@@ -340,7 +340,7 @@ def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer,
Language = UniObjectClass.LanguageDef[IndexI][0]
if Language not in UniLanguageListFiltered:
continue
-
+
StringBuffer = BytesIO()
StrStringValue = ''
ArrayLength = 0
@@ -403,7 +403,7 @@ def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer,
# Add an EFI_HII_SIBT_END at last
#
Str = WriteLine(Str, ' ' + EFI_HII_SIBT_END + ",")
-
+
#
# Create binary UNI string
#
@@ -458,7 +458,7 @@ def CreateCFileEnd():
# @param BaseName: The basename of strings
# @param UniObjectClass A UniObjectClass instance
# @param IsCompatibleMode Compatible Mode
-# @param FilterInfo Platform language filter information
+# @param FilterInfo Platform language filter information
#
# @retval CFile: A string of complete .c file
#
@@ -544,7 +544,7 @@ def SearchString(UniObjectClass, FileList, IsCompatibleMode):
# This function is used for UEFI2.1 spec
#
#
-def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, SkipList, BaseName, IsCompatibleMode = False, ShellMode = False, UniGenCFlag = True, UniGenBinBuffer = None, FilterInfo = [True, []]):
+def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, SkipList, BaseName, IsCompatibleMode = False, ShellMode = False, UniGenCFlag = True, UniGenBinBuffer = None, FilterInfo = [True, []]):
if len(UniFilList) > 0:
if ShellMode:
#
diff --git a/BaseTools/Source/Python/AutoGen/UniClassObject.py b/BaseTools/Source/Python/AutoGen/UniClassObject.py
index 88810f1ccc..384f31b165 100644
--- a/BaseTools/Source/Python/AutoGen/UniClassObject.py
+++ b/BaseTools/Source/Python/AutoGen/UniClassObject.py
@@ -284,7 +284,7 @@ class UniFileClassObject(object):
if not IsLangInDef:
#
# The found STRING tokens will be added into new language string list
- # so that the unique STRING identifier is reserved for all languages in the package list.
+ # so that the unique STRING identifier is reserved for all languages in the package list.
#
FirstLangName = self.LanguageDef[0][0]
if LangName != FirstLangName:
@@ -411,10 +411,10 @@ class UniFileClassObject(object):
#
# Ignore empty line
#
- if len(Line) == 0:
- continue
-
-
+ if len(Line) == 0:
+ continue
+
+
Line = Line.replace(u'/langdef', u'#langdef')
Line = Line.replace(u'/string', u'#string')
Line = Line.replace(u'/language', u'#language')
@@ -429,8 +429,8 @@ class UniFileClassObject(object):
Line = Line.replace(u'\\r', CR)
Line = Line.replace(u'\\t', u' ')
Line = Line.replace(u'\t', u' ')
- Line = Line.replace(u'\\"', u'"')
- Line = Line.replace(u"\\'", u"'")
+ Line = Line.replace(u'\\"', u'"')
+ Line = Line.replace(u"\\'", u"'")
Line = Line.replace(BACK_SLASH_PLACEHOLDER, u'\\')
StartPos = Line.find(u'\\x')
@@ -570,7 +570,7 @@ class UniFileClassObject(object):
else:
EdkLogger.error('Unicode File Parser', FORMAT_NOT_SUPPORTED, "The language '%s' for %s is not defined in Unicode file %s." \
% (Language, Name, self.File))
-
+
if Language not in self.OrderedStringList:
self.OrderedStringList[Language] = []
self.OrderedStringDict[Language] = {}
@@ -592,7 +592,7 @@ class UniFileClassObject(object):
for LangName in self.LanguageDef:
#
# New STRING token will be added into all language string lists.
- # so that the unique STRING identifier is reserved for all languages in the package list.
+ # so that the unique STRING identifier is reserved for all languages in the package list.
#
if LangName[0] != Language:
if UseOtherLangDef != '':
diff --git a/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py b/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py
index 49fbdf3246..edd40a1498 100644
--- a/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py
+++ b/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py
@@ -24,7 +24,7 @@ from Common.DataType import *
class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
def __init__(self):
self.var_check_info = []
-
+
def push_back(self, var_check_tab):
for tab in self.var_check_info:
if tab.equal(var_check_tab):
@@ -32,9 +32,9 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
break
else:
self.var_check_info.append(var_check_tab)
-
+
def dump(self, dest, Phase):
-
+
if not os.path.isabs(dest):
return
if not os.path.exists(dest):
@@ -161,7 +161,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
b = pack("=B", var_check_tab.pad)
Buffer += b
realLength += 1
-
+
DbFile = BytesIO()
if Phase == 'DXE' and os.path.exists(BinFilePath):
BinFile = open(BinFilePath, "rb")
@@ -175,7 +175,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
Buffer = BinBuffer + Buffer
DbFile.write(Buffer)
SaveFileOnChange(BinFilePath, DbFile.getvalue(), True)
-
+
class VAR_CHECK_PCD_VARIABLE_TAB(object):
pad = 0xDA
@@ -193,26 +193,26 @@ class VAR_CHECK_PCD_VARIABLE_TAB(object):
def UpdateSize(self):
self.HeaderLength = 32 + len(self.Name.split(","))
self.Length = 32 + len(self.Name.split(",")) + self.GetValidTabLen()
-
+
def GetValidTabLen(self):
validtablen = 0
for item in self.validtab:
- validtablen += item.Length
- return validtablen
-
+ validtablen += item.Length
+ return validtablen
+
def SetAttributes(self, attributes):
self.Attributes = attributes
-
+
def push_back(self, valid_obj):
if valid_obj is not None:
self.validtab.append(valid_obj)
-
+
def equal(self, varchecktab):
if self.Guid == varchecktab.Guid and self.Name == varchecktab.Name:
return True
else:
return False
-
+
def merge(self, varchecktab):
for validobj in varchecktab.validtab:
if validobj in self.validtab:
@@ -235,10 +235,10 @@ class VAR_CHECK_PCD_VALID_OBJ(object):
except:
self.StorageWidth = 0
self.ValidData = False
-
- def __eq__(self, validObj):
+
+ def __eq__(self, validObj):
return validObj and self.VarOffset == validObj.VarOffset
-
+
class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
def __init__(self, VarOffset, validlist, PcdDataType):
super(VAR_CHECK_PCD_VALID_LIST, self).__init__(VarOffset, validlist, PcdDataType)
@@ -246,7 +246,7 @@ class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
valid_num_list = []
for item in self.rawdata:
valid_num_list.extend(item.split(','))
-
+
for valid_num in valid_num_list:
valid_num = valid_num.strip()
@@ -255,10 +255,10 @@ class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
else:
self.data.add(int(valid_num))
-
+
self.Length = 5 + len(self.data) * self.StorageWidth
-
-
+
+
class VAR_CHECK_PCD_VALID_RANGE(VAR_CHECK_PCD_VALID_OBJ):
def __init__(self, VarOffset, validrange, PcdDataType):
super(VAR_CHECK_PCD_VALID_RANGE, self).__init__(VarOffset, validrange, PcdDataType)
@@ -275,7 +275,7 @@ class VAR_CHECK_PCD_VALID_RANGE(VAR_CHECK_PCD_VALID_OBJ):
for obj in rangelist.pop():
self.data.add((obj.start, obj.end))
self.Length = 5 + len(self.data) * 2 * self.StorageWidth
-
+
def GetValidationObject(PcdClass, VarOffset):
if PcdClass.validateranges:
diff --git a/BaseTools/Source/Python/BPDG/BPDG.py b/BaseTools/Source/Python/BPDG/BPDG.py
index 86c44abb67..07cee89762 100644
--- a/BaseTools/Source/Python/BPDG/BPDG.py
+++ b/BaseTools/Source/Python/BPDG/BPDG.py
@@ -1,12 +1,12 @@
## @file
# Intel Binary Product Data Generation Tool (Intel BPDG).
-# This tool provide a simple process for the creation of a binary file containing read-only
-# configuration data for EDK II platforms that contain Dynamic and DynamicEx PCDs described
-# in VPD sections. It also provide an option for specifying an alternate name for a mapping
-# file of PCD layout for use during the build when the platform integrator selects to use
+# This tool provide a simple process for the creation of a binary file containing read-only
+# configuration data for EDK II platforms that contain Dynamic and DynamicEx PCDs described
+# in VPD sections. It also provide an option for specifying an alternate name for a mapping
+# file of PCD layout for use during the build when the platform integrator selects to use
# automatic offset calculation.
#
-# Copyright (c) 2010 - 2016, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -47,26 +47,26 @@ VERSION = (st.LBL_BPDG_VERSION + " Build " + gBUILD_VERSION)
#
def main():
global Options, Args
-
+
# Initialize log system
- EdkLogger.Initialize()
+ EdkLogger.Initialize()
Options, Args = MyOptionParser()
-
+
ReturnCode = 0
-
+
if Options.opt_verbose:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
elif Options.opt_quiet:
EdkLogger.SetLevel(EdkLogger.QUIET)
elif Options.debug_level is not None:
- EdkLogger.SetLevel(Options.debug_level + 1)
+ EdkLogger.SetLevel(Options.debug_level + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
-
+
if Options.bin_filename is None:
- EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file")
+ EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file")
if Options.filename is None:
- EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file")
+ EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file")
Force = False
if Options.opt_force is not None:
@@ -76,8 +76,8 @@ def main():
StartBpdg(Args[0], Options.filename, Options.bin_filename, Force)
else :
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please specify the file which contain the VPD pcd info.",
- None)
-
+ None)
+
return ReturnCode
@@ -87,8 +87,8 @@ def main():
#
# @retval options A optparse.Values object containing the parsed options
# @retval args Target of BPDG command
-#
-def MyOptionParser():
+#
+def MyOptionParser():
#
# Process command line firstly.
#
@@ -106,10 +106,10 @@ def MyOptionParser():
parser.add_option('-o', '--vpd-filename', action='store', dest='bin_filename',
help=st.MSG_OPTION_VPD_FILENAME)
parser.add_option('-m', '--map-filename', action='store', dest='filename',
- help=st.MSG_OPTION_MAP_FILENAME)
+ help=st.MSG_OPTION_MAP_FILENAME)
parser.add_option('-f', '--force', action='store_true', dest='opt_force',
- help=st.MSG_OPTION_FORCE)
-
+ help=st.MSG_OPTION_FORCE)
+
(options, args) = parser.parse_args()
if len(args) == 0:
EdkLogger.info("Please specify the filename.txt file which contain the VPD pcd info!")
@@ -118,7 +118,7 @@ def MyOptionParser():
return options, args
-## Start BPDG and call the main functions
+## Start BPDG and call the main functions
#
# This method mainly focus on call GenVPD class member functions to complete
# BPDG's target. It will process VpdFile override, and provide the interface file
@@ -137,19 +137,19 @@ def StartBpdg(InputFileName, MapFileName, VpdFileName, Force):
choice = sys.stdin.readline()
if choice.strip().lower() not in ['y', 'yes', '']:
return
-
+
GenVPD = GenVpd.GenVPD (InputFileName, MapFileName, VpdFileName)
-
- EdkLogger.info('%-24s = %s' % ("VPD input data file: ", InputFileName))
+
+ EdkLogger.info('%-24s = %s' % ("VPD input data file: ", InputFileName))
EdkLogger.info('%-24s = %s' % ("VPD output map file: ", MapFileName))
- EdkLogger.info('%-24s = %s' % ("VPD output binary file: ", VpdFileName))
-
+ EdkLogger.info('%-24s = %s' % ("VPD output binary file: ", VpdFileName))
+
GenVPD.ParserInputFile()
GenVPD.FormatFileLine()
GenVPD.FixVpdOffset()
GenVPD.GenerateVpdFile(MapFileName, VpdFileName)
-
- EdkLogger.info("- Vpd pcd fixed done! -")
+
+ EdkLogger.info("- Vpd pcd fixed done! -")
if __name__ == '__main__':
r = main()
@@ -157,4 +157,4 @@ if __name__ == '__main__':
if r < 0 or r > 127: r = 1
sys.exit(r)
-
+
diff --git a/BaseTools/Source/Python/BPDG/GenVpd.py b/BaseTools/Source/Python/BPDG/GenVpd.py
index 3bae803467..2eefcc2490 100644
--- a/BaseTools/Source/Python/BPDG/GenVpd.py
+++ b/BaseTools/Source/Python/BPDG/GenVpd.py
@@ -31,10 +31,10 @@ _FORMAT_CHAR = {1: 'B',
## The VPD PCD data structure for store and process each VPD PCD entry.
#
-# This class contain method to format and pack pcd's value.
+# This class contain method to format and pack pcd's value.
#
class PcdEntry:
- def __init__(self, PcdCName, SkuId,PcdOffset, PcdSize, PcdValue, Lineno=None, FileName=None, PcdUnpackValue=None,
+ def __init__(self, PcdCName, SkuId,PcdOffset, PcdSize, PcdValue, Lineno=None, FileName=None, PcdUnpackValue=None,
PcdBinOffset=None, PcdBinSize=None, Alignment=None):
self.PcdCName = PcdCName.strip()
self.SkuId = SkuId.strip()
@@ -47,7 +47,7 @@ class PcdEntry:
self.PcdBinOffset = PcdBinOffset
self.PcdBinSize = PcdBinSize
self.Alignment = Alignment
-
+
if self.PcdValue == '' :
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid PCD format(Name: %s File: %s line: %s) , no Value specified!" % (self.PcdCName, self.FileName, self.Lineno))
@@ -63,13 +63,13 @@ class PcdEntry:
self._GenOffsetValue ()
## Analyze the string value to judge the PCD's datum type equal to Boolean or not.
- #
+ #
# @param ValueString PCD's value
# @param Size PCD's size
- #
+ #
# @retval True PCD's datum type is Boolean
- # @retval False PCD's datum type is not Boolean.
- #
+ # @retval False PCD's datum type is not Boolean.
+ #
def _IsBoolean(self, ValueString, Size):
if (Size == "1"):
if ValueString.upper() in ["TRUE", "FALSE"]:
@@ -80,10 +80,10 @@ class PcdEntry:
return False
## Convert the PCD's value from string to integer.
- #
+ #
# This function will try to convert the Offset value form string to integer
# for both hexadecimal and decimal.
- #
+ #
def _GenOffsetValue(self):
if self.PcdOffset != "*" :
try:
@@ -96,10 +96,10 @@ class PcdEntry:
"Invalid offset value %s for PCD %s (File: %s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno))
## Pack Boolean type VPD PCD's value form string to binary type.
- #
+ #
# @param ValueString The boolean type string for pack.
- #
- #
+ #
+ #
def _PackBooleanValue(self, ValueString):
if ValueString.upper() == "TRUE" or ValueString in ["1", "0x1", "0x01"]:
try:
@@ -115,10 +115,10 @@ class PcdEntry:
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
## Pack Integer type VPD PCD's value form string to binary type.
- #
+ #
# @param ValueString The Integer type string for pack.
- #
- #
+ #
+ #
def _PackIntValue(self, IntValue, Size):
if Size not in _FORMAT_CHAR:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
@@ -170,7 +170,7 @@ class PcdEntry:
# 3: {bytearray}, only support byte-array.
#
# @param ValueString The Integer type string for pack.
- #
+ #
def _PackPtrValue(self, ValueString, Size):
if ValueString.startswith('L"') or ValueString.startswith("L'"):
self._PackUnicode(ValueString, Size)
@@ -183,9 +183,9 @@ class PcdEntry:
"Invalid VOID* type PCD %s value %s (File: %s Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno))
## Pack an Ascii PCD value.
- #
+ #
# An Ascii string for a PCD should be in format as ""/''.
- #
+ #
def _PackString(self, ValueString, Size):
if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
@@ -198,7 +198,7 @@ class PcdEntry:
QuotedFlag = False
ValueString = ValueString[1:-1]
- # No null-terminator in 'string'
+ # No null-terminator in 'string'
if (QuotedFlag and len(ValueString) + 1 > Size) or (not QuotedFlag and len(ValueString) > Size):
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
@@ -209,9 +209,9 @@ class PcdEntry:
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
## Pack a byte-array PCD value.
- #
+ #
# A byte-array for a PCD should be in format as {0x01, 0x02, ...}.
- #
+ #
def _PackByteArray(self, ValueString, Size):
if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
@@ -261,7 +261,7 @@ class PcdEntry:
self.PcdValue = ReturnArray.tolist()
## Pack a unicode PCD value into byte array.
- #
+ #
# A unicode string for a PCD should be in format as L""/L''.
#
def _PackUnicode(self, UnicodeString, Size):
@@ -271,7 +271,7 @@ class PcdEntry:
QuotedFlag = True
if UnicodeString.startswith("L'"):
- QuotedFlag = False
+ QuotedFlag = False
UnicodeString = UnicodeString[2:-1]
# No null-terminator in L'string'
@@ -304,7 +304,7 @@ class PcdEntry:
# 2. Format the input file data to remove unused lines;
# 3. Fixed offset if needed;
# 4. Generate output file, including guided.map and guided.bin file;
-#
+#
class GenVPD :
## Constructor of DscBuildData
#
@@ -334,9 +334,9 @@ class GenVPD :
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % InputFileName, None)
##
- # Parser the input file which is generated by the build tool. Convert the value of each pcd's
+ # Parser the input file which is generated by the build tool. Convert the value of each pcd's
# from string to it's real format. Also remove the useless line in the input file.
- #
+ #
def ParserInputFile (self):
count = 0
for line in self.FileLinesList:
@@ -390,7 +390,7 @@ class GenVPD :
#
# After remove the useless line, if there are no data remain in the file line list,
# Report warning messages to user's.
- #
+ #
if len(self.FileLinesList) == 0 :
EdkLogger.warn('BPDG', BuildToolError.RESOURCE_NOT_AVAILABLE,
"There are no VPD type pcds defined in DSC file, Please check it.")
@@ -480,14 +480,14 @@ class GenVPD :
continue
##
- # This function used to create a clean list only contain useful information and reorganized to make it
+ # This function used to create a clean list only contain useful information and reorganized to make it
# easy to be sorted
#
def FormatFileLine (self) :
for eachPcd in self.FileLinesList :
if eachPcd.PcdOffset != '*' :
- # Use pcd's Offset value as key, and pcd's Value as value
+ # Use pcd's Offset value as key, and pcd's Value as value
self.PcdFixedOffsetSizeList.append(eachPcd)
else :
# Use pcd's CName as key, and pcd's Size as value
@@ -497,11 +497,11 @@ class GenVPD :
##
# This function is use to fix the offset value which the not specified in the map file.
# Usually it use the star (meaning any offset) character in the offset field
- #
+ #
def FixVpdOffset (self):
# At first, the offset should start at 0
# Sort fixed offset list in order to find out where has free spaces for the pcd's offset
- # value is "*" to insert into.
+ # value is "*" to insert into.
self.PcdFixedOffsetSizeList.sort(lambda x, y: cmp(x.PcdBinOffset, y.PcdBinOffset))
@@ -530,57 +530,57 @@ class GenVPD :
Pcd.PcdBinOffset = NowOffset
Pcd.PcdOffset = str(hex(Pcd.PcdBinOffset))
NowOffset += Pcd.PcdOccupySize
-
+
self.PcdFixedOffsetSizeList = self.PcdUnknownOffsetList
return
- # Check the offset of VPD type pcd's offset start from 0.
+ # Check the offset of VPD type pcd's offset start from 0.
if self.PcdFixedOffsetSizeList[0].PcdBinOffset != 0 :
EdkLogger.warn("BPDG", "The offset of VPD type pcd should start with 0, please check it.",
None)
# Judge whether the offset in fixed pcd offset list is overlapped or not.
lenOfList = len(self.PcdFixedOffsetSizeList)
- count = 0
+ count = 0
while (count < lenOfList - 1) :
PcdNow = self.PcdFixedOffsetSizeList[count]
PcdNext = self.PcdFixedOffsetSizeList[count+1]
- # Two pcd's offset is same
+ # Two pcd's offset is same
if PcdNow.PcdBinOffset == PcdNext.PcdBinOffset :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offset of %s at line: %s is same with %s at line: %s in file %s" % \
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
- # Overlapped
+ # Overlapped
if PcdNow.PcdBinOffset + PcdNow.PcdOccupySize > PcdNext.PcdBinOffset :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offset of %s at line: %s is overlapped with %s at line: %s in file %s" % \
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
- # Has free space, raise a warning message
+ # Has free space, raise a warning message
if PcdNow.PcdBinOffset + PcdNow.PcdOccupySize < PcdNext.PcdBinOffset :
EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offsets have free space of between %s at line: %s and %s at line: %s in file %s" % \
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
count += 1
-
+
LastOffset = self.PcdFixedOffsetSizeList[0].PcdBinOffset
FixOffsetSizeListCount = 0
lenOfList = len(self.PcdFixedOffsetSizeList)
lenOfUnfixedList = len(self.PcdUnknownOffsetList)
-
+
##
- # Insert the un-fixed offset pcd's list into fixed offset pcd's list if has free space between those pcds.
- #
+ # Insert the un-fixed offset pcd's list into fixed offset pcd's list if has free space between those pcds.
+ #
while (FixOffsetSizeListCount < lenOfList) :
-
- eachFixedPcd = self.PcdFixedOffsetSizeList[FixOffsetSizeListCount]
+
+ eachFixedPcd = self.PcdFixedOffsetSizeList[FixOffsetSizeListCount]
NowOffset = eachFixedPcd.PcdBinOffset
-
- # Has free space
+
+ # Has free space
if LastOffset < NowOffset :
if lenOfUnfixedList != 0 :
countOfUnfixedList = 0
@@ -598,42 +598,42 @@ class GenVPD :
eachUnfixedPcd.PcdBinOffset = LastOffset
# Insert this pcd into fixed offset pcd list.
self.PcdFixedOffsetSizeList.insert(FixOffsetSizeListCount, eachUnfixedPcd)
-
+
# Delete the item's offset that has been fixed and added into fixed offset list
self.PcdUnknownOffsetList.pop(countOfUnfixedList)
-
+
# After item added, should enlarge the length of fixed pcd offset list
- lenOfList += 1
+ lenOfList += 1
FixOffsetSizeListCount += 1
-
+
# Decrease the un-fixed pcd offset list's length
lenOfUnfixedList -= 1
-
- # Modify the last offset value
- LastOffset += needFixPcdSize
+
+ # Modify the last offset value
+ LastOffset += needFixPcdSize
else :
# It can not insert into those two pcds, need to check still has other space can store it.
LastOffset = NowOffset + self.PcdFixedOffsetSizeList[FixOffsetSizeListCount].PcdOccupySize
FixOffsetSizeListCount += 1
break
-
+
# Set the FixOffsetSizeListCount = lenOfList for quit the loop
else :
- FixOffsetSizeListCount = lenOfList
-
- # No free space, smoothly connect with previous pcd.
+ FixOffsetSizeListCount = lenOfList
+
+ # No free space, smoothly connect with previous pcd.
elif LastOffset == NowOffset :
LastOffset = NowOffset + eachFixedPcd.PcdOccupySize
FixOffsetSizeListCount += 1
- # Usually it will not enter into this thunk, if so, means it overlapped.
+ # Usually it will not enter into this thunk, if so, means it overlapped.
else :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,
"The offset value definition has overlapped at pcd: %s, it's offset is: %s, in file: %s line: %s" % \
(eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset, eachFixedPcd.InputFileName, eachFixedPcd.Lineno),
None)
FixOffsetSizeListCount += 1
-
- # Continue to process the un-fixed offset pcd's list, add this time, just append them behind the fixed pcd's offset list.
+
+ # Continue to process the un-fixed offset pcd's list, add this time, just append them behind the fixed pcd's offset list.
lenOfUnfixedList = len(self.PcdUnknownOffsetList)
lenOfList = len(self.PcdFixedOffsetSizeList)
while (lenOfUnfixedList > 0) :
@@ -641,23 +641,23 @@ class GenVPD :
# The last pcd instance
LastPcd = self.PcdFixedOffsetSizeList[lenOfList-1]
NeedFixPcd = self.PcdUnknownOffsetList[0]
-
+
NeedFixPcd.PcdBinOffset = LastPcd.PcdBinOffset + LastPcd.PcdOccupySize
if NeedFixPcd.PcdBinOffset % NeedFixPcd.Alignment != 0:
NeedFixPcd.PcdBinOffset = (NeedFixPcd.PcdBinOffset / NeedFixPcd.Alignment + 1) * NeedFixPcd.Alignment
NeedFixPcd.PcdOffset = str(hex(NeedFixPcd.PcdBinOffset))
-
+
# Insert this pcd into fixed offset pcd list's tail.
self.PcdFixedOffsetSizeList.insert(lenOfList, NeedFixPcd)
# Delete the item's offset that has been fixed and added into fixed offset list
self.PcdUnknownOffsetList.pop(0)
-
+
lenOfList += 1
- lenOfUnfixedList -= 1
+ lenOfUnfixedList -= 1
##
# Write the final data into output files.
- #
+ #
def GenerateVpdFile (self, MapFileName, BinFileName):
#Open an VPD file to process
@@ -705,4 +705,4 @@ class GenVPD :
fStringIO.close ()
fVpdFile.close ()
fMapFile.close ()
-
+
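FixVpdOffset() above assigns offsets to the "*" (automatic) entries by placing each one after the last fixed-offset PCD and rounding up to the entry's alignment when needed. A minimal sketch of that rounding rule, outside the patch and with hypothetical names:

def next_offset(last_offset, last_size, alignment):
    # Place the new PCD right after the previous one, then round up to the
    # next multiple of its alignment if the result is not already aligned.
    offset = last_offset + last_size
    if alignment and offset % alignment:
        offset = (offset // alignment + 1) * alignment
    return offset

# Example: a PCD following one that ends at offset 0x0D, with 4-byte alignment, lands at 0x10
assert next_offset(0x0B, 0x02, 4) == 0x10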
diff --git a/BaseTools/Source/Python/BPDG/StringTable.py b/BaseTools/Source/Python/BPDG/StringTable.py
index bbcb451198..dc116f05f4 100644
--- a/BaseTools/Source/Python/BPDG/StringTable.py
+++ b/BaseTools/Source/Python/BPDG/StringTable.py
@@ -1,7 +1,7 @@
## @file
# This file is used to define strings used in the BPDG tool
#
-# Copyright (c) 2010 - 2016, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -31,8 +31,8 @@ MAP_FILE_COMMENT_TEMPLATE = \
# THIS IS AUTO-GENERATED FILE BY BPDG TOOLS AND PLEASE DO NOT MAKE MODIFICATION.
#
# This file lists all VPD informations for a platform fixed/adjusted by BPDG tool.
-#
-# Copyright (c) 2010 -2016, Intel Corporation. All rights reserved.<BR>
+#
+# Copyright (c) 2010 -2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -50,18 +50,18 @@ LBL_BPDG_VERSION = (u"1.0")
LBL_BPDG_USAGE = \
(
"""BPDG options -o Filename.bin -m Filename.map Filename.txt
-Copyright (c) 2010 - 2016, Intel Corporation All Rights Reserved.
+Copyright (c) 2010 - 2018, Intel Corporation All Rights Reserved.
Intel(r) Binary Product Data Generation Tool (Intel(r) BPDG)
-
+
Required Flags:
-o BIN_FILENAME, --vpd-filename=BIN_FILENAME
Specify the file name for the VPD binary file
-m FILENAME, --map-filename=FILENAME
- Generate file name for consumption during the build that contains
- the mapping of Pcd name, offset, datum size and value derived
+ Generate file name for consumption during the build that contains
+ the mapping of Pcd name, offset, datum size and value derived
from the input file and any automatic calculations.
-"""
+"""
)
MSG_OPTION_HELP = ("Show this help message and exit.")
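The LBL_BPDG_USAGE text above documents the two required flags; a typical invocation matching that usage line (file names here are hypothetical) would look like:

BPDG -o VpdData.bin -m VpdData.map VpdPcdEntries.txt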
diff --git a/BaseTools/Source/Python/Common/BuildVersion.py b/BaseTools/Source/Python/Common/BuildVersion.py
index 7414d30f49..c889dea2f0 100644
--- a/BaseTools/Source/Python/Common/BuildVersion.py
+++ b/BaseTools/Source/Python/Common/BuildVersion.py
@@ -2,15 +2,15 @@
#
# This file is for build version number auto generation
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
-gBUILD_VERSION = ""
+gBUILD_VERSION = "Developer Build based on Revision: Unknown"
diff --git a/BaseTools/Source/Python/Common/Database.py b/BaseTools/Source/Python/Common/Database.py
index d3340f5a90..6abfa1f15e 100644
--- a/BaseTools/Source/Python/Common/Database.py
+++ b/BaseTools/Source/Python/Common/Database.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create a database used by ECC tool
#
-# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -33,7 +33,7 @@ from Table.TableDsc import TableDsc
# This class defined the build databse
# During the phase of initialization, the database will create all tables and
# insert all records of table DataModel
-#
+#
# @param object: Inherited from object class
# @param DbPath: A string for the path of the ECC database
#
@@ -54,7 +54,7 @@ class Database(object):
self.TblInf = TableInf(self.Cur)
self.TblDec = TableDec(self.Cur)
self.TblDsc = TableDsc(self.Cur)
-
+
## Initialize build database
#
# 1. Delete all old existing tables
@@ -69,7 +69,7 @@ class Database(object):
# self.TblDataModel.Drop()
# self.TblDsc.Drop()
# self.TblFile.Drop()
-
+
#
# Create new tables
#
@@ -78,7 +78,7 @@ class Database(object):
self.TblInf.Create()
self.TblDec.Create()
self.TblDsc.Create()
-
+
#
# Initialize table DataModel
#
@@ -91,10 +91,10 @@ class Database(object):
#
def QueryTable(self, Table):
Table.Query()
-
+
## Close entire database
#
- # Commit all first
+ # Commit all first
# Close the connection and cursor
#
def Close(self):
@@ -110,11 +110,11 @@ class Database(object):
if __name__ == '__main__':
EdkLogger.Initialize()
EdkLogger.SetLevel(EdkLogger.DEBUG_0)
-
+
Db = Database(DATABASE_PATH)
Db.InitDatabase()
- Db.QueryTable(Db.TblDataModel)
+ Db.QueryTable(Db.TblDataModel)
Db.QueryTable(Db.TblFile)
Db.QueryTable(Db.TblDsc)
Db.Close()
- \ No newline at end of file
+
diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py
index 044ef8d62b..b56ddd5324 100644
--- a/BaseTools/Source/Python/Common/Misc.py
+++ b/BaseTools/Source/Python/Common/Misc.py
@@ -63,11 +63,11 @@ gDependencyDatabase = {} # arch : {file path : [dependent files list]}
_TempInfs = []
def GetVariableOffset(mapfilepath, efifilepath, varnames):
- """ Parse map file to get variable offset in current EFI file
+ """ Parse map file to get variable offset in current EFI file
@param mapfilepath Map file absolution path
@param efifilepath: EFI binary file full path
@param varnames iteratable container whose elements are variable names to be searched
-
+
@return List whos elements are tuple with variable name and raw offset
"""
lines = []
@@ -77,7 +77,7 @@ def GetVariableOffset(mapfilepath, efifilepath, varnames):
f.close()
except:
return None
-
+
if len(lines) == 0: return None
firstline = lines[0].strip()
if (firstline.startswith("Archive member included ") and
@@ -177,7 +177,7 @@ def _parseGeneral(lines, efifilepath, varnames):
continue
if line.startswith("entry point at"):
status = 3
- continue
+ continue
if status == 1 and len(line) != 0:
m = secReGeneral.match(line)
assert m is not None, "Fail to parse the section in map file , line is %s" % line
@@ -257,7 +257,7 @@ def ProcessDuplicatedInf(Path, BaseName, Workspace):
#
# A temporary INF is copied to database path which must have write permission
# The temporary will be removed at the end of build
- # In case of name conflict, the file name is
+ # In case of name conflict, the file name is
# FILE_GUIDBaseName (0D1B936F-68F3-4589-AFCC-FB8B7AEBC836module.inf)
#
TempFullPath = os.path.join(DbDir,
@@ -268,7 +268,7 @@ def ProcessDuplicatedInf(Path, BaseName, Workspace):
#
# To build same module more than once, the module path with FILE_GUID overridden has
# the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
- # in DSC which is used as relative path by C files and other files in INF.
+ # in DSC which is used as relative path by C files and other files in INF.
# A trick was used: all module paths are PathClass instances, after the initialization
# of PathClass, the PathClass.Path is overridden by the temporary INF path.
#
@@ -287,7 +287,7 @@ def ProcessDuplicatedInf(Path, BaseName, Workspace):
# If file exists, compare contents
#
if os.path.exists(TempFullPath):
- with open(str(Path), 'rb') as f1, open(TempFullPath, 'rb') as f2:
+ with open(str(Path), 'rb') as f1, open(TempFullPath, 'rb') as f2:
if f1.read() == f2.read():
return RtPath
_TempInfs.append(TempFullPath)
@@ -1545,29 +1545,29 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''):
# Used to avoid split issue while the value string contain "|" character
#
# @param[in] Setting: A String contain value/datum type/token number information;
-#
-# @retval ValueList: A List contain value, datum type and toke number.
+#
+# @retval ValueList: A List contain value, datum type and toke number.
#
def AnalyzePcdData(Setting):
ValueList = ['', '', '']
ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
PtrValue = ValueRe.findall(Setting)
-
+
ValueUpdateFlag = False
-
+
if len(PtrValue) >= 1:
Setting = re.sub(ValueRe, '', Setting)
ValueUpdateFlag = True
TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
-
+
if ValueUpdateFlag:
ValueList[0] = PtrValue[0]
-
- return ValueList
-
+
+ return ValueList
+
## check format of PCD value against its the datum type
#
# For PCD value setting
@@ -1770,7 +1770,7 @@ class PathClass(object):
OtherKey = Other.Path
else:
OtherKey = str(Other)
-
+
SelfKey = self.Path
if SelfKey == OtherKey:
return 0
@@ -1908,7 +1908,7 @@ class PeImageClass():
def _ByteListToStr(self, ByteList):
String = ''
for index in range(len(ByteList)):
- if ByteList[index] == 0:
+ if ByteList[index] == 0:
break
String += chr(ByteList[index])
return String
@@ -1945,11 +1945,11 @@ class DefaultStore():
if sid == minid:
return name
class SkuClass():
-
+
DEFAULT = 0
SINGLE = 1
MULTIPLE =2
-
+
def __init__(self,SkuIdentifier='', SkuIds=None):
if SkuIds is None:
SkuIds = {}
@@ -1961,7 +1961,7 @@ class SkuClass():
EdkLogger.error("build", PARAMETER_INVALID,
ExtraData = "SKU-ID [%s] value %s exceeds the max value of UINT64"
% (SkuName, SkuId))
-
+
self.AvailableSkuIds = sdict()
self.SkuIdSet = []
self.SkuIdNumberSet = []
@@ -1975,10 +1975,10 @@ class SkuClass():
self.SkuIdSet = SkuIds.keys()
self.SkuIdNumberSet = [num[0].strip() + 'U' for num in SkuIds.values()]
else:
- r = SkuIdentifier.split('|')
+ r = SkuIdentifier.split('|')
self.SkuIdSet=[(r[k].strip()).upper() for k in range(len(r))]
k = None
- try:
+ try:
self.SkuIdNumberSet = [SkuIds[k][0].strip() + 'U' for k in self.SkuIdSet]
except Exception:
EdkLogger.error("build", PARAMETER_INVALID,
@@ -2027,7 +2027,7 @@ class SkuClass():
skuorderset = []
for skuname in self.SkuIdSet:
skuorderset.append(self.GetSkuChain(skuname))
-
+
skuorder = []
for index in range(max(len(item) for item in skuorderset)):
for subset in skuorderset:
@@ -2039,8 +2039,8 @@ class SkuClass():
return skuorder
- def __SkuUsageType(self):
-
+ def __SkuUsageType(self):
+
if self.__SkuIdentifier.upper() == "ALL":
return SkuClass.MULTIPLE
@@ -2073,7 +2073,7 @@ class SkuClass():
return ArrayStr
def __GetAvailableSkuIds(self):
return self.AvailableSkuIds
-
+
def __GetSystemSkuID(self):
if self.__SkuUsageType() == SkuClass.SINGLE:
if len(self.SkuIdSet) == 1:
diff --git a/BaseTools/Source/Python/Common/MultipleWorkspace.py b/BaseTools/Source/Python/Common/MultipleWorkspace.py
index 2a76d49cc6..81594e22cd 100644
--- a/BaseTools/Source/Python/Common/MultipleWorkspace.py
+++ b/BaseTools/Source/Python/Common/MultipleWorkspace.py
@@ -4,7 +4,7 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2015 - 2016, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -20,16 +20,16 @@ from Common.DataType import TAB_WORKSPACE
## MultipleWorkspace
#
# This class manage multiple workspace behavior
-#
+#
# @param class:
#
# @var WORKSPACE: defined the current WORKSPACE
# @var PACKAGES_PATH: defined the other WORKSAPCE, if current WORKSPACE is invalid, search valid WORKSPACE from PACKAGES_PATH
-#
+#
class MultipleWorkspace(object):
WORKSPACE = ''
PACKAGES_PATH = None
-
+
## convertPackagePath()
#
# Convert path to match workspace.
@@ -59,7 +59,7 @@ class MultipleWorkspace(object):
cls.PACKAGES_PATH = [cls.convertPackagePath (Ws, os.path.normpath(Path.strip())) for Path in PackagesPath.split(os.pathsep)]
else:
cls.PACKAGES_PATH = []
-
+
## join()
#
# rewrite os.path.join function
@@ -79,7 +79,7 @@ class MultipleWorkspace(object):
return Path
Path = os.path.join(Ws, *p)
return Path
-
+
## relpath()
#
# rewrite os.path.relpath function
@@ -98,7 +98,7 @@ class MultipleWorkspace(object):
if Path.lower().startswith(Ws.lower()):
Path = os.path.relpath(Path, Ws)
return Path
-
+
## getWs()
#
# get valid workspace for the path
@@ -117,7 +117,7 @@ class MultipleWorkspace(object):
if os.path.exists(absPath):
return Pkg
return Ws
-
+
## handleWsMacro()
#
# handle the $(WORKSPACE) tag, if current workspace is invalid path relative the tool, replace it.
@@ -143,7 +143,7 @@ class MultipleWorkspace(object):
PathList[i] = str[0:MacroStartPos] + Path
PathStr = ' '.join(PathList)
return PathStr
-
+
## getPkgPath()
#
# get all package pathes.
@@ -153,4 +153,4 @@ class MultipleWorkspace(object):
@classmethod
def getPkgPath(cls):
return cls.PACKAGES_PATH
- \ No newline at end of file
+
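The join()/getWs() hunks above describe resolving a path against the primary WORKSPACE first and then each PACKAGES_PATH entry. A rough sketch of that lookup order, outside the patch and with hypothetical names:

import os

def resolve(workspace, packages_path, *parts):
    # Try the primary workspace first, then each extra package path;
    # fall back to the primary workspace if nothing exists on disk.
    candidate = os.path.join(workspace, *parts)
    if os.path.exists(candidate):
        return candidate
    for pkg in packages_path or []:
        candidate = os.path.join(pkg, *parts)
        if os.path.exists(candidate):
            return candidate
    return os.path.join(workspace, *parts)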
diff --git a/BaseTools/Source/Python/Common/RangeExpression.py b/BaseTools/Source/Python/Common/RangeExpression.py
index 014c75b8ce..20581edadf 100644
--- a/BaseTools/Source/Python/Common/RangeExpression.py
+++ b/BaseTools/Source/Python/Common/RangeExpression.py
@@ -43,7 +43,7 @@ ERR_IN_OPERAND = 'Macro after IN operator can only be: $(FAMILY), $(ARCH), $(TOO
class RangeObject(object):
def __init__(self, start, end, empty = False):
-
+
if int(start) < int(end):
self.start = int(start)
self.end = int(end)
@@ -55,24 +55,24 @@ class RangeObject(object):
class RangeContainer(object):
def __init__(self):
self.rangelist = []
-
+
def push(self, RangeObject):
self.rangelist.append(RangeObject)
self.rangelist = sorted(self.rangelist, key = lambda rangeobj : rangeobj.start)
self.merge()
-
+
def pop(self):
for item in self.rangelist:
yield item
-
- def __clean__(self):
+
+ def __clean__(self):
newrangelist = []
for rangeobj in self.rangelist:
if rangeobj.empty == True:
continue
else:
newrangelist.append(rangeobj)
- self.rangelist = newrangelist
+ self.rangelist = newrangelist
def merge(self):
self.__clean__()
for i in range(0, len(self.rangelist) - 1):
@@ -80,23 +80,23 @@ class RangeContainer(object):
continue
else:
self.rangelist[i + 1].start = self.rangelist[i].start
- self.rangelist[i + 1].end = self.rangelist[i + 1].end > self.rangelist[i].end and self.rangelist[i + 1].end or self.rangelist[i].end
+ self.rangelist[i + 1].end = self.rangelist[i + 1].end > self.rangelist[i].end and self.rangelist[i + 1].end or self.rangelist[i].end
self.rangelist[i].empty = True
self.__clean__()
-
+
def dump(self):
print("----------------------")
rangelist = ""
for object in self.rangelist:
rangelist = rangelist + "[%d , %d]" % (object.start, object.end)
print(rangelist)
-
-
-class XOROperatorObject(object):
- def __init__(self):
+
+
+class XOROperatorObject(object):
+ def __init__(self):
pass
- def Calculate(self, Operand, DataType, SymbolTable):
+ def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "XOR ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -108,9 +108,9 @@ class XOROperatorObject(object):
return rangeId
class LEOperatorObject(object):
- def __init__(self):
+ def __init__(self):
pass
- def Calculate(self, Operand, DataType, SymbolTable):
+ def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "LE ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -120,22 +120,22 @@ class LEOperatorObject(object):
SymbolTable[rangeId1] = rangeContainer
return rangeId1
class LTOperatorObject(object):
- def __init__(self):
+ def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
- Expr = "LT ..."
- raise BadExpression(ERR_SNYTAX % Expr)
+ Expr = "LT ..."
+ raise BadExpression(ERR_SNYTAX % Expr)
rangeId1 = str(uuid.uuid1())
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(0, int(Operand) - 1))
SymbolTable[rangeId1] = rangeContainer
- return rangeId1
+ return rangeId1
class GEOperatorObject(object):
- def __init__(self):
+ def __init__(self):
pass
- def Calculate(self, Operand, DataType, SymbolTable):
+ def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "GE ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -143,12 +143,12 @@ class GEOperatorObject(object):
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(int(Operand), MAX_VAL_TYPE[DataType]))
SymbolTable[rangeId1] = rangeContainer
- return rangeId1
-
+ return rangeId1
+
class GTOperatorObject(object):
- def __init__(self):
+ def __init__(self):
pass
- def Calculate(self, Operand, DataType, SymbolTable):
+ def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "GT ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -156,12 +156,12 @@ class GTOperatorObject(object):
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(int(Operand) + 1, MAX_VAL_TYPE[DataType]))
SymbolTable[rangeId1] = rangeContainer
- return rangeId1
-
+ return rangeId1
+
class EQOperatorObject(object):
- def __init__(self):
+ def __init__(self):
pass
- def Calculate(self, Operand, DataType, SymbolTable):
+ def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "EQ ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -169,8 +169,8 @@ class EQOperatorObject(object):
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(int(Operand), int(Operand)))
SymbolTable[rangeId1] = rangeContainer
- return rangeId1
-
+ return rangeId1
+
def GetOperatorObject(Operator):
if Operator == '>':
return GTOperatorObject()
@@ -214,8 +214,8 @@ class RangeExpression(BaseExpression):
NumberDict[HexNumber] = Number
for HexNum in NumberDict:
expr = expr.replace(HexNum, NumberDict[HexNum])
-
- rangedict = {}
+
+ rangedict = {}
for validrange in self.RangePattern.findall(expr):
start, end = validrange.split(" - ")
start = start.strip()
@@ -225,19 +225,19 @@ class RangeExpression(BaseExpression):
rangeContainer.push(RangeObject(start, end))
self.operanddict[str(rangeid)] = rangeContainer
rangedict[validrange] = str(rangeid)
-
+
for validrange in rangedict:
expr = expr.replace(validrange, rangedict[validrange])
-
- self._Expr = expr
+
+ self._Expr = expr
return expr
-
-
+
+
def EvalRange(self, Operator, Oprand):
operatorobj = GetOperatorObject(Operator)
return operatorobj.Calculate(Oprand, self.PcdDataType, self.operanddict)
-
+
def Rangeintersection(self, Oprand1, Oprand2):
rangeContainer1 = self.operanddict[Oprand1]
rangeContainer2 = self.operanddict[Oprand2]
@@ -266,35 +266,35 @@ class RangeExpression(BaseExpression):
elif end1 >= end2:
rangeid = str(uuid.uuid1())
rangeContainer.push(RangeObject(start2, end2))
-
+
self.operanddict[rangeid] = rangeContainer
# rangeContainer.dump()
return rangeid
-
+
def Rangecollections(self, Oprand1, Oprand2):
rangeContainer1 = self.operanddict[Oprand1]
rangeContainer2 = self.operanddict[Oprand2]
rangeContainer = RangeContainer()
-
+
for rangeobj in rangeContainer2.pop():
rangeContainer.push(rangeobj)
for rangeobj in rangeContainer1.pop():
rangeContainer.push(rangeobj)
-
+
rangeid = str(uuid.uuid1())
self.operanddict[rangeid] = rangeContainer
-
+
# rangeContainer.dump()
return rangeid
-
-
+
+
def NegtiveRange(self, Oprand1):
rangeContainer1 = self.operanddict[Oprand1]
-
-
+
+
rangeids = []
-
+
for rangeobj in rangeContainer1.pop():
rangeContainer = RangeContainer()
rangeid = str(uuid.uuid1())
@@ -321,13 +321,13 @@ class RangeExpression(BaseExpression):
re = self.Rangeintersection(rangeids[0], rangeids[1])
for i in range(2, len(rangeids)):
re = self.Rangeintersection(re, rangeids[i])
-
+
rangeid2 = str(uuid.uuid1())
self.operanddict[rangeid2] = self.operanddict[re]
return rangeid2
-
+
def Eval(self, Operator, Oprand1, Oprand2 = None):
-
+
if Operator in ["!", "NOT", "not"]:
if not gGuidPattern.match(Oprand1.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
@@ -338,7 +338,7 @@ class RangeExpression(BaseExpression):
elif Operator == 'and' :
if not gGuidPatternEnd.match(Oprand1.strip()) or not gGuidPatternEnd.match(Oprand2.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
- return self.Rangeintersection(Oprand1, Oprand2)
+ return self.Rangeintersection(Oprand1, Oprand2)
elif Operator == 'or':
if not gGuidPatternEnd.match(Oprand1.strip()) or not gGuidPatternEnd.match(Oprand2.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
@@ -369,11 +369,11 @@ class RangeExpression(BaseExpression):
self._Len = len(self._Expr)
self._Token = ''
self._WarnExcept = None
-
+
# Literal token without any conversion
self._LiteralToken = ''
-
+
# store the operand object
self.operanddict = {}
# The Pcd max value depends on PcdDataType
@@ -393,9 +393,9 @@ class RangeExpression(BaseExpression):
self._Depth = Depth
self._Expr = self._Expr.strip()
-
+
self.preProcessRangeExpr(self._Expr)
-
+
# check if the expression does not need to evaluate
if RealValue and Depth == 0:
self._Token = self._Expr
@@ -407,12 +407,12 @@ class RangeExpression(BaseExpression):
Val = self._OrExpr()
RealVal = Val
-
+
RangeIdList = RealVal.split("or")
RangeList = []
for rangeid in RangeIdList:
RangeList.append(self.operanddict[rangeid.strip()])
-
+
return RangeList
# Template function to parse binary operators which have same precedence
diff --git a/BaseTools/Source/Python/Common/StringUtils.py b/BaseTools/Source/Python/Common/StringUtils.py
index 3f6bae3bdc..723faac0f9 100644
--- a/BaseTools/Source/Python/Common/StringUtils.py
+++ b/BaseTools/Source/Python/Common/StringUtils.py
@@ -839,7 +839,7 @@ def StringToArray(String):
return "{%s,0x00}" % ",".join(C.strip() for C in String[1:-1].split(','))
else:
return "{%s}" % ",".join(C.strip() for C in String[1:-1].split(','))
-
+
else:
if len(String.split()) % 2:
return '{%s,0}' % ','.join(String.split())
diff --git a/BaseTools/Source/Python/Common/VariableAttributes.py b/BaseTools/Source/Python/Common/VariableAttributes.py
index a2e22ca040..522adac8d5 100644
--- a/BaseTools/Source/Python/Common/VariableAttributes.py
+++ b/BaseTools/Source/Python/Common/VariableAttributes.py
@@ -1,9 +1,9 @@
# # @file
-#
+#
# This file is used to handle the variable attributes and property information
#
#
-# Copyright (c) 2015, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -12,7 +12,7 @@
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
-
+
class VariableAttributes(object):
EFI_VARIABLE_NON_VOLATILE = 0x00000001
EFI_VARIABLE_BOOTSERVICE_ACCESS = 0x00000002
@@ -24,22 +24,22 @@ class VariableAttributes(object):
"RT":EFI_VARIABLE_RUNTIME_ACCESS,
"RO":VAR_CHECK_VARIABLE_PROPERTY_READ_ONLY
}
-
+
def __init__(self):
pass
-
+
@staticmethod
def GetVarAttributes(var_attr_str):
VarAttr = 0x00000000
VarProp = 0x00000000
-
+
attr_list = var_attr_str.split(",")
for attr in attr_list:
attr = attr.strip()
if attr == 'RO':
VarProp = VariableAttributes.VAR_CHECK_VARIABLE_PROPERTY_READ_ONLY
else:
- VarAttr = VarAttr | VariableAttributes.VarAttributesMap.get(attr, 0x00000000)
+ VarAttr = VarAttr | VariableAttributes.VarAttributesMap.get(attr, 0x00000000)
return VarAttr, VarProp
@staticmethod
def ValidateVarAttributes(var_attr_str):
diff --git a/BaseTools/Source/Python/Common/VpdInfoFile.py b/BaseTools/Source/Python/Common/VpdInfoFile.py
index b98c021b57..0485bf482e 100644
--- a/BaseTools/Source/Python/Common/VpdInfoFile.py
+++ b/BaseTools/Source/Python/Common/VpdInfoFile.py
@@ -1,9 +1,9 @@
## @file
-#
+#
# This package manage the VPD PCD information file which will be generated
# by build tool's autogen.
# The VPD PCD information file will be input for third-party BPDG tool which
-# is pointed by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt
+# is pointed by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt
#
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
@@ -33,8 +33,8 @@ FILE_COMMENT_TEMPLATE = \
# THIS IS AUTO-GENERATED FILE BY BUILD TOOLS AND PLEASE DO NOT MAKE MODIFICATION.
#
# This file lists all VPD informations for a platform collected by build.exe.
-#
-# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
+#
+# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -70,17 +70,17 @@ FILE_COMMENT_TEMPLATE = \
#
class VpdInfoFile:
- _rVpdPcdLine = None
+ _rVpdPcdLine = None
## Constructor
def __init__(self):
## Dictionary for VPD in following format
#
- # Key : PcdClassObject instance.
+ # Key : PcdClassObject instance.
# @see BuildClassObject.PcdClassObject
# Value : offset in different SKU such as [sku1_offset, sku2_offset]
self._VpdArray = {}
self._VpdInfo = {}
-
+
## Add a VPD PCD collected from platform's autogen when building.
#
# @param vpds The list of VPD PCD collected for a platform.
@@ -91,40 +91,40 @@ class VpdInfoFile:
def Add(self, Vpd, skuname, Offset):
if (Vpd is None):
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
-
+
if not (Offset >= 0 or Offset == "*"):
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
-
+
if Vpd.DatumType == TAB_VOID:
if Vpd.MaxDatumSize <= 0:
- EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
- elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
+ elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
if not Vpd.MaxDatumSize:
Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType]
else:
if Vpd.MaxDatumSize <= 0:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
-
+
if Vpd not in self._VpdArray:
#
- # If there is no Vpd instance in dict, that imply this offset for a given SKU is a new one
+ # If there is no Vpd instance in dict, that imply this offset for a given SKU is a new one
#
self._VpdArray[Vpd] = {}
self._VpdArray[Vpd].update({skuname:Offset})
-
-
+
+
## Generate VPD PCD information into a text file
- #
+ #
# If parameter FilePath is invalid, then assert.
- # If
+ # If
# @param FilePath The given file path which would hold VPD information
def Write(self, FilePath):
if not (FilePath is not None or len(FilePath) != 0):
- EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
- "Invalid parameter FilePath: %s." % FilePath)
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
+ "Invalid parameter FilePath: %s." % FilePath)
Content = FILE_COMMENT_TEMPLATE
Pcds = sorted(self._VpdArray.keys())
@@ -155,15 +155,15 @@ class VpdInfoFile:
try:
fd = open(FilePath, "r")
except:
- EdkLogger.error("VpdInfoFile",
- BuildToolError.FILE_OPEN_FAILURE,
+ EdkLogger.error("VpdInfoFile",
+ BuildToolError.FILE_OPEN_FAILURE,
"Fail to open file %s for written." % FilePath)
Lines = fd.readlines()
for Line in Lines:
Line = Line.strip()
if len(Line) == 0 or Line.startswith("#"):
continue
-
+
#
# the line must follow output format defined in BPDG spec.
#
@@ -173,9 +173,9 @@ class VpdInfoFile:
TokenSpaceName, PcdTokenName = PcdName.split(".")
except:
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath)
-
+
Found = False
-
+
if (TokenSpaceName, PcdTokenName) not in self._VpdInfo:
self._VpdInfo[(TokenSpaceName, PcdTokenName)] = []
self._VpdInfo[(TokenSpaceName, PcdTokenName)].append((SkuId, Offset, Value))
@@ -188,62 +188,62 @@ class VpdInfoFile:
if VpdObject.TokenSpaceGuidCName == TokenSpaceName and VpdObjectTokenCName == PcdTokenName.strip() and sku == SkuId:
if self._VpdArray[VpdObject][sku] == "*":
if Offset == "*":
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
self._VpdArray[VpdObject][sku] = Offset
Found = True
if not Found:
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Can not find PCD defined in VPD guid file.")
-
+
## Get count of VPD PCD collected from platform's autogen when building.
#
- # @return The integer count value
+ # @return The integer count value
def GetCount(self):
Count = 0
for OffsetList in self._VpdArray.values():
Count += len(OffsetList)
-
+
return Count
-
+
## Get an offset value for a given VPD PCD
#
- # Because BPDG only support one Sku, so only return offset for SKU default.
+ # Because BPDG only support one Sku, so only return offset for SKU default.
#
- # @param vpd A given VPD PCD
+ # @param vpd A given VPD PCD
def GetOffset(self, vpd):
if vpd not in self._VpdArray:
return None
-
+
if len(self._VpdArray[vpd]) == 0:
return None
-
+
return self._VpdArray[vpd]
def GetVpdInfo(self, arg):
(PcdTokenName, TokenSpaceName) = arg
return self._VpdInfo.get((TokenSpaceName, PcdTokenName))
-
+
## Call external BPDG tool to process VPD file
-#
+#
# @param ToolPath The string path name for BPDG tool
# @param VpdFileName The string path name for VPD information guid.txt
-#
+#
def CallExtenalBPDGTool(ToolPath, VpdFileName):
assert ToolPath is not None, "Invalid parameter ToolPath"
assert VpdFileName is not None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"
-
+
OutputDir = os.path.dirname(VpdFileName)
FileName = os.path.basename(VpdFileName)
BaseName, ext = os.path.splitext(FileName)
OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)
-
+
try:
PopenObject = subprocess.Popen(' '.join([ToolPath,
- '-o', OutputBinFileName,
+ '-o', OutputBinFileName,
'-m', OutputMapFileName,
'-q',
'-f',
VpdFileName]),
- stdout=subprocess.PIPE,
+ stdout=subprocess.PIPE,
stderr= subprocess.PIPE,
shell=True)
except Exception as X:
@@ -252,11 +252,11 @@ def CallExtenalBPDGTool(ToolPath, VpdFileName):
print(out)
while PopenObject.returncode is None :
PopenObject.wait()
-
+
if PopenObject.returncode != 0:
if PopenObject.returncode != 0:
EdkLogger.debug(EdkLogger.DEBUG_1, "Fail to call BPDG tool", str(error))
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, "Fail to execute BPDG tool with exit code: %d, the error message is: \n %s" % \
(PopenObject.returncode, str(error)))
-
+
return PopenObject.returncode
diff --git a/BaseTools/Source/Python/CommonDataClass/CommonClass.py b/BaseTools/Source/Python/CommonDataClass/CommonClass.py
index e29f5211d5..a98cf8a7c5 100644
--- a/BaseTools/Source/Python/CommonDataClass/CommonClass.py
+++ b/BaseTools/Source/Python/CommonDataClass/CommonClass.py
@@ -14,7 +14,7 @@
## SkuInfoClass
#
# This class defined SkuInfo item used in Module/Platform/Package files
-#
+#
# @param object: Inherited from object class
# @param SkuIdName: Input value for SkuIdName, default is ''
# @param SkuId: Input value for SkuId, default is ''
@@ -35,11 +35,11 @@
# @var DefaultValue: To store value for DefaultValue
#
class SkuInfoClass(object):
- def __init__(self, SkuIdName = '', SkuId = '', VariableName = '', VariableGuid = '', VariableOffset = '',
+ def __init__(self, SkuIdName = '', SkuId = '', VariableName = '', VariableGuid = '', VariableOffset = '',
HiiDefaultValue = '', VpdOffset = '', DefaultValue = '', VariableGuidValue = '', VariableAttribute = '', DefaultStore = None):
self.SkuIdName = SkuIdName
self.SkuId = SkuId
-
+
#
# Used by Hii
#
@@ -52,17 +52,17 @@ class SkuInfoClass(object):
self.HiiDefaultValue = HiiDefaultValue
self.VariableAttribute = VariableAttribute
self.DefaultStoreDict = DefaultStore
-
+
#
# Used by Vpd
#
self.VpdOffset = VpdOffset
-
+
#
# Used by Default
#
self.DefaultValue = DefaultValue
-
+
## Convert the class to a string
#
# Convert each member of the class to string
diff --git a/BaseTools/Source/Python/CommonDataClass/FdfClass.py b/BaseTools/Source/Python/CommonDataClass/FdfClass.py
index 96a630f4d2..64b58c2078 100644
--- a/BaseTools/Source/Python/CommonDataClass/FdfClass.py
+++ b/BaseTools/Source/Python/CommonDataClass/FdfClass.py
@@ -1,7 +1,7 @@
## @file
# classes represent data in FDF
#
-# Copyright (c) 2007 - 2013, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -83,7 +83,7 @@ class RegionClassObject:
## FFS data in FDF
#
-#
+#
class FfsClassObject:
## The constructor
#
@@ -98,7 +98,7 @@ class FfsClassObject:
## FILE statement data in FDF
#
-#
+#
class FileStatementClassObject (FfsClassObject) :
## The constructor
#
@@ -149,7 +149,7 @@ class AprioriSectionClassObject:
## section data in FDF
#
-#
+#
class SectionClassObject:
## The constructor
#
@@ -157,10 +157,10 @@ class SectionClassObject:
#
def __init__(self):
self.Alignment = None
-
+
## Depex expression section in FDF
#
-#
+#
class DepexSectionClassObject (SectionClassObject):
## The constructor
#
@@ -186,7 +186,7 @@ class CompressSectionClassObject (SectionClassObject) :
## Data section data in FDF
#
-#
+#
class DataSectionClassObject (SectionClassObject):
## The constructor
#
@@ -220,7 +220,7 @@ class EfiSectionClassObject (SectionClassObject):
## FV image section data in FDF
#
-#
+#
class FvImageSectionClassObject (SectionClassObject):
## The constructor
#
@@ -237,7 +237,7 @@ class FvImageSectionClassObject (SectionClassObject):
## GUIDed section data in FDF
#
-#
+#
class GuidSectionClassObject (SectionClassObject) :
## The constructor
#
@@ -270,7 +270,7 @@ class UiSectionClassObject (SectionClassObject):
## Version section data in FDF
#
-#
+#
class VerSectionClassObject (SectionClassObject):
## The constructor
#
@@ -305,7 +305,7 @@ class RuleClassObject :
## Complex rule data in FDF
#
-#
+#
class RuleComplexFileClassObject(RuleClassObject) :
## The constructor
#
@@ -343,7 +343,7 @@ class RuleFileExtensionClassObject(RuleClassObject):
## Capsule data in FDF
#
-#
+#
class CapsuleClassObject :
## The constructor
#
@@ -380,7 +380,7 @@ class VtfClassObject :
## VTF component data in FDF
#
-#
+#
class ComponentStatementClassObject :
## The constructor
#
@@ -396,7 +396,7 @@ class ComponentStatementClassObject :
self.CompSym = None
self.CompSize = None
self.FilePos = None
-
+
## OptionROM data in FDF
#
#
@@ -408,4 +408,4 @@ class OptionRomClassObject:
def __init__(self):
self.DriverName = None
self.FfsList = []
-
+
diff --git a/BaseTools/Source/Python/Ecc/CLexer.py b/BaseTools/Source/Python/Ecc/CLexer.py
index a496f43440..b9e57c1b55 100644
--- a/BaseTools/Source/Python/Ecc/CLexer.py
+++ b/BaseTools/Source/Python/Ecc/CLexer.py
@@ -2,7 +2,7 @@
from antlr3 import *
from antlr3.compat import set, frozenset
-
+
## @file
# The file defines the Lexer for C source files.
#
@@ -10,7 +10,7 @@ from antlr3.compat import set, frozenset
# This file is generated by running:
# java org.antlr.Tool C.g
#
-# Copyright (c) 2009 - 2010, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
@@ -4341,7 +4341,7 @@ class CLexer(Lexer):
u"\12\uffff"
)
-
+
DFA25_transition = [
DFA.unpack(u"\1\2\1\uffff\12\1"),
DFA.unpack(u"\1\3\1\uffff\12\1\12\uffff\1\5\1\4\1\5\35\uffff\1\5"
@@ -4479,7 +4479,7 @@ class CLexer(Lexer):
u"\u0192\uffff"
)
-
+
DFA35_transition = [
DFA.unpack(u"\6\73\2\70\1\73\2\70\22\73\1\70\1\50\1\65\1\72\1\63"
u"\1\45\1\46\1\64\1\34\1\35\1\40\1\42\1\3\1\43\1\41\1\44\1\66\11"
@@ -4943,5 +4943,5 @@ class CLexer(Lexer):
# class definition for DFA #35
DFA35 = DFA
-
+
diff --git a/BaseTools/Source/Python/Ecc/CParser.py b/BaseTools/Source/Python/Ecc/CParser.py
index d7eff138da..b66ac2d8d5 100644
--- a/BaseTools/Source/Python/Ecc/CParser.py
+++ b/BaseTools/Source/Python/Ecc/CParser.py
@@ -3,7 +3,7 @@
from __future__ import print_function
from antlr3 import *
from antlr3.compat import set, frozenset
-
+
## @file
# The file defines the parser for C source files.
#
@@ -11,7 +11,7 @@ from antlr3.compat import set, frozenset
# This file is generated by running:
# java org.antlr.Tool C.g
#
-# Copyright (c) 2009 - 2010, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
@@ -57,23 +57,23 @@ OctalEscape=17
# token names
tokenNames = [
- "<invalid>", "<EOR>", "<DOWN>", "<UP>",
- "IDENTIFIER", "HEX_LITERAL", "OCTAL_LITERAL", "DECIMAL_LITERAL", "CHARACTER_LITERAL",
- "STRING_LITERAL", "FLOATING_POINT_LITERAL", "LETTER", "EscapeSequence",
- "HexDigit", "IntegerTypeSuffix", "Exponent", "FloatTypeSuffix", "OctalEscape",
- "UnicodeEscape", "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
- "LINE_COMMAND", "';'", "'typedef'", "','", "'='", "'extern'", "'static'",
- "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'", "'int'",
- "'long'", "'float'", "'double'", "'signed'", "'unsigned'", "'{'", "'}'",
- "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'", "'IN'",
- "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'",
- "'EFIAPI'", "'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'PACKED'",
- "'('", "')'", "'['", "']'", "'*'", "'...'", "'+'", "'-'", "'/'", "'%'",
- "'++'", "'--'", "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='",
- "'/='", "'%='", "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
- "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'", "'>'", "'<='",
- "'>='", "'<<'", "'>>'", "'__asm__'", "'_asm'", "'__asm'", "'case'",
- "'default'", "'if'", "'else'", "'switch'", "'while'", "'do'", "'for'",
+ "<invalid>", "<EOR>", "<DOWN>", "<UP>",
+ "IDENTIFIER", "HEX_LITERAL", "OCTAL_LITERAL", "DECIMAL_LITERAL", "CHARACTER_LITERAL",
+ "STRING_LITERAL", "FLOATING_POINT_LITERAL", "LETTER", "EscapeSequence",
+ "HexDigit", "IntegerTypeSuffix", "Exponent", "FloatTypeSuffix", "OctalEscape",
+ "UnicodeEscape", "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
+ "LINE_COMMAND", "';'", "'typedef'", "','", "'='", "'extern'", "'static'",
+ "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'", "'int'",
+ "'long'", "'float'", "'double'", "'signed'", "'unsigned'", "'{'", "'}'",
+ "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'", "'IN'",
+ "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'",
+ "'EFIAPI'", "'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'PACKED'",
+ "'('", "')'", "'['", "']'", "'*'", "'...'", "'+'", "'-'", "'/'", "'%'",
+ "'++'", "'--'", "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='",
+ "'/='", "'%='", "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
+ "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'", "'>'", "'<='",
+ "'>='", "'<<'", "'>>'", "'__asm__'", "'_asm'", "'__asm'", "'case'",
+ "'default'", "'if'", "'else'", "'switch'", "'while'", "'do'", "'for'",
"'goto'", "'continue'", "'break'", "'return'"
]
@@ -106,33 +106,33 @@ class CParser(Parser):
print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
- PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
- FileProfile.PredicateExpressionList.append(PredExp)
-
+ PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.PredicateExpressionList.append(PredExp)
+
def StoreEnumerationDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
- EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
- FileProfile.EnumerationDefinitionList.append(EnumDef)
-
+ EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.EnumerationDefinitionList.append(EnumDef)
+
def StoreStructUnionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
- SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
- FileProfile.StructUnionDefinitionList.append(SUDef)
-
+ SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.StructUnionDefinitionList.append(SUDef)
+
def StoreTypedefDefinition(self, StartLine, StartOffset, EndLine, EndOffset, FromText, ToText):
- Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
- FileProfile.TypedefDefinitionList.append(Tdef)
-
+ Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.TypedefDefinitionList.append(Tdef)
+
def StoreFunctionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText, LeftBraceLine, LeftBraceOffset, DeclLine, DeclOffset):
- FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
- FileProfile.FunctionDefinitionList.append(FuncDef)
-
+ FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
+ FileProfile.FunctionDefinitionList.append(FuncDef)
+
def StoreVariableDeclaration(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText):
- VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
- FileProfile.VariableDeclarationList.append(VarDecl)
-
+ VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.VariableDeclarationList.append(VarDecl)
+
def StoreFunctionCalling(self, StartLine, StartOffset, EndLine, EndOffset, FuncName, ParamList):
- FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
- FileProfile.FunctionCallingList.append(FuncCall)
-
+ FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.FunctionCallingList.append(FuncCall)
+
@@ -144,7 +144,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 1):
- return
+ return
# C.g:103:2: ( ( external_declaration )* )
# C.g:103:4: ( external_declaration )*
@@ -163,7 +163,7 @@ class CParser(Parser):
self.external_declaration()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -183,7 +183,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end translation_unit
@@ -196,7 +196,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 2):
- return
+ return
# C.g:119:2: ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? )
alt3 = 3
@@ -212,7 +212,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 1, self.input)
@@ -228,7 +228,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 2, self.input)
@@ -244,7 +244,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 3, self.input)
@@ -260,7 +260,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 4, self.input)
@@ -276,7 +276,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 5, self.input)
@@ -292,7 +292,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 6, self.input)
@@ -308,7 +308,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 7, self.input)
@@ -324,7 +324,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 8, self.input)
@@ -340,7 +340,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 9, self.input)
@@ -356,7 +356,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 10, self.input)
@@ -372,7 +372,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 11, self.input)
@@ -388,7 +388,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 12, self.input)
@@ -406,7 +406,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 13, self.input)
@@ -422,7 +422,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 14, self.input)
@@ -440,7 +440,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 16, self.input)
@@ -456,7 +456,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 17, self.input)
@@ -472,7 +472,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 18, self.input)
@@ -485,7 +485,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 0, self.input)
@@ -497,7 +497,7 @@ class CParser(Parser):
self.function_definition()
self.following.pop()
if self.failed:
- return
+ return
elif alt3 == 2:
@@ -506,7 +506,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
elif alt3 == 3:
@@ -515,7 +515,7 @@ class CParser(Parser):
self.macro_statement()
self.following.pop()
if self.failed:
- return
+ return
# C.g:121:20: ( ';' )?
alt2 = 2
LA2_0 = self.input.LA(1)
@@ -526,7 +526,7 @@ class CParser(Parser):
# C.g:121:21: ';'
self.match(self.input, 25, self.FOLLOW_25_in_external_declaration126)
if self.failed:
- return
+ return
@@ -542,7 +542,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end external_declaration
@@ -569,7 +569,7 @@ class CParser(Parser):
declarator1 = None
-
+
self.function_definition_stack[-1].ModifierText = ''
self.function_definition_stack[-1].DeclText = ''
self.function_definition_stack[-1].LBLine = 0
@@ -783,7 +783,7 @@ class CParser(Parser):
if self.backtracking == 0:
-
+
if d is not None:
self.function_definition_stack[-1].ModifierText = self.input.toString(d.start, d.stop)
else:
@@ -797,7 +797,7 @@ class CParser(Parser):
else:
self.function_definition_stack[-1].LBLine = b.start.line
self.function_definition_stack[-1].LBOffset = b.start.charPositionInLine
-
+
@@ -805,7 +805,7 @@ class CParser(Parser):
retval.stop = self.input.LT(-1)
if self.backtracking == 0:
-
+
self.StoreFunctionDefinition(retval.start.line, retval.start.charPositionInLine, retval.stop.line, retval.stop.charPositionInLine, self.function_definition_stack[-1].ModifierText, self.function_definition_stack[-1].DeclText, self.function_definition_stack[-1].LBLine, self.function_definition_stack[-1].LBOffset, self.function_definition_stack[-1].DeclLine, self.function_definition_stack[-1].DeclOffset)
@@ -845,7 +845,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 4):
- return
+ return
# C.g:167:2: (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' )
alt9 = 2
@@ -858,7 +858,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("166:1: declaration : (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' );", 9, 0, self.input)
@@ -869,7 +869,7 @@ class CParser(Parser):
a = self.input.LT(1)
self.match(self.input, 26, self.FOLLOW_26_in_declaration203)
if self.failed:
- return
+ return
# C.g:167:17: (b= declaration_specifiers )?
alt7 = 2
LA7 = self.input.LA(1)
@@ -906,7 +906,7 @@ class CParser(Parser):
b = self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
@@ -914,18 +914,18 @@ class CParser(Parser):
c = self.init_declarator_list()
self.following.pop()
if self.failed:
- return
+ return
d = self.input.LT(1)
self.match(self.input, 25, self.FOLLOW_25_in_declaration220)
if self.failed:
- return
+ return
if self.backtracking == 0:
-
+
if b is not None:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start, b.stop), self.input.toString(c.start, c.stop))
else:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start, c.stop))
-
+
@@ -935,7 +935,7 @@ class CParser(Parser):
s = self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
# C.g:175:30: (t= init_declarator_list )?
alt8 = 2
LA8_0 = self.input.LA(1)
@@ -948,19 +948,19 @@ class CParser(Parser):
t = self.init_declarator_list()
self.following.pop()
if self.failed:
- return
+ return
e = self.input.LT(1)
self.match(self.input, 25, self.FOLLOW_25_in_declaration243)
if self.failed:
- return
+ return
if self.backtracking == 0:
-
+
if t is not None:
self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start, s.stop), self.input.toString(t.start, t.stop))
-
+
@@ -974,7 +974,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end declaration
@@ -1185,7 +1185,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 7):
- return
+ return
# C.g:194:2: ( declarator ( '=' initializer )? )
# C.g:194:4: declarator ( '=' initializer )?
@@ -1193,7 +1193,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
# C.g:194:15: ( '=' initializer )?
alt12 = 2
LA12_0 = self.input.LA(1)
@@ -1204,12 +1204,12 @@ class CParser(Parser):
# C.g:194:16: '=' initializer
self.match(self.input, 28, self.FOLLOW_28_in_init_declarator329)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_initializer_in_init_declarator331)
self.initializer()
self.following.pop()
if self.failed:
- return
+ return
@@ -1226,7 +1226,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end init_declarator
@@ -1239,7 +1239,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 8):
- return
+ return
# C.g:198:2: ( 'extern' | 'static' | 'auto' | 'register' | 'STATIC' )
# C.g:
@@ -1251,7 +1251,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -1273,7 +1273,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end storage_class_specifier
@@ -1291,7 +1291,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 9):
- return
+ return
# C.g:206:2: ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id )
alt13 = 12
@@ -1324,7 +1324,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("205:1: type_specifier : ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id );", 13, 0, self.input)
@@ -1334,63 +1334,63 @@ class CParser(Parser):
# C.g:206:4: 'void'
self.match(self.input, 34, self.FOLLOW_34_in_type_specifier376)
if self.failed:
- return
+ return
elif alt13 == 2:
# C.g:207:4: 'char'
self.match(self.input, 35, self.FOLLOW_35_in_type_specifier381)
if self.failed:
- return
+ return
elif alt13 == 3:
# C.g:208:4: 'short'
self.match(self.input, 36, self.FOLLOW_36_in_type_specifier386)
if self.failed:
- return
+ return
elif alt13 == 4:
# C.g:209:4: 'int'
self.match(self.input, 37, self.FOLLOW_37_in_type_specifier391)
if self.failed:
- return
+ return
elif alt13 == 5:
# C.g:210:4: 'long'
self.match(self.input, 38, self.FOLLOW_38_in_type_specifier396)
if self.failed:
- return
+ return
elif alt13 == 6:
# C.g:211:4: 'float'
self.match(self.input, 39, self.FOLLOW_39_in_type_specifier401)
if self.failed:
- return
+ return
elif alt13 == 7:
# C.g:212:4: 'double'
self.match(self.input, 40, self.FOLLOW_40_in_type_specifier406)
if self.failed:
- return
+ return
elif alt13 == 8:
# C.g:213:4: 'signed'
self.match(self.input, 41, self.FOLLOW_41_in_type_specifier411)
if self.failed:
- return
+ return
elif alt13 == 9:
# C.g:214:4: 'unsigned'
self.match(self.input, 42, self.FOLLOW_42_in_type_specifier416)
if self.failed:
- return
+ return
elif alt13 == 10:
@@ -1399,12 +1399,12 @@ class CParser(Parser):
s = self.struct_or_union_specifier()
self.following.pop()
if self.failed:
- return
+ return
if self.backtracking == 0:
-
+
if s.stop is not None:
self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start, s.stop))
-
+
@@ -1414,12 +1414,12 @@ class CParser(Parser):
e = self.enum_specifier()
self.following.pop()
if self.failed:
- return
+ return
if self.backtracking == 0:
-
+
if e.stop is not None:
self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
-
+
@@ -1429,7 +1429,7 @@ class CParser(Parser):
self.type_id()
self.following.pop()
if self.failed:
- return
+ return
@@ -1442,7 +1442,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end type_specifier
@@ -1455,13 +1455,13 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 10):
- return
+ return
# C.g:229:5: ( IDENTIFIER )
# C.g:229:9: IDENTIFIER
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_type_id467)
if self.failed:
- return
+ return
@@ -1475,7 +1475,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end type_id
@@ -1612,7 +1612,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 12):
- return
+ return
# C.g:240:2: ( 'struct' | 'union' )
# C.g:
@@ -1624,7 +1624,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -1646,7 +1646,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end struct_or_union
@@ -1659,7 +1659,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 13):
- return
+ return
# C.g:245:2: ( ( struct_declaration )+ )
# C.g:245:4: ( struct_declaration )+
@@ -1679,7 +1679,7 @@ class CParser(Parser):
self.struct_declaration()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -1688,7 +1688,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(16, self.input)
raise eee
@@ -1709,7 +1709,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end struct_declaration_list
@@ -1722,7 +1722,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 14):
- return
+ return
# C.g:249:2: ( specifier_qualifier_list struct_declarator_list ';' )
# C.g:249:4: specifier_qualifier_list struct_declarator_list ';'
@@ -1730,15 +1730,15 @@ class CParser(Parser):
self.specifier_qualifier_list()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_struct_declarator_list_in_struct_declaration551)
self.struct_declarator_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_struct_declaration553)
if self.failed:
- return
+ return
@@ -1752,7 +1752,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end struct_declaration
@@ -1765,7 +1765,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 15):
- return
+ return
# C.g:253:2: ( ( type_qualifier | type_specifier )+ )
# C.g:253:4: ( type_qualifier | type_specifier )+
@@ -1832,7 +1832,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
elif alt17 == 2:
@@ -1841,7 +1841,7 @@ class CParser(Parser):
self.type_specifier()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -1850,7 +1850,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(17, self.input)
raise eee
@@ -1871,7 +1871,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end specifier_qualifier_list
@@ -1884,7 +1884,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 16):
- return
+ return
# C.g:257:2: ( struct_declarator ( ',' struct_declarator )* )
# C.g:257:4: struct_declarator ( ',' struct_declarator )*
@@ -1892,7 +1892,7 @@ class CParser(Parser):
self.struct_declarator()
self.following.pop()
if self.failed:
- return
+ return
# C.g:257:22: ( ',' struct_declarator )*
while True: #loop18
alt18 = 2
@@ -1906,12 +1906,12 @@ class CParser(Parser):
# C.g:257:23: ',' struct_declarator
self.match(self.input, 27, self.FOLLOW_27_in_struct_declarator_list587)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_struct_declarator_in_struct_declarator_list589)
self.struct_declarator()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -1931,7 +1931,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end struct_declarator_list
@@ -1944,7 +1944,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 17):
- return
+ return
# C.g:261:2: ( declarator ( ':' constant_expression )? | ':' constant_expression )
alt20 = 2
@@ -1957,7 +1957,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("260:1: struct_declarator : ( declarator ( ':' constant_expression )? | ':' constant_expression );", 20, 0, self.input)
@@ -1969,7 +1969,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
# C.g:261:15: ( ':' constant_expression )?
alt19 = 2
LA19_0 = self.input.LA(1)
@@ -1980,12 +1980,12 @@ class CParser(Parser):
# C.g:261:16: ':' constant_expression
self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator605)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_constant_expression_in_struct_declarator607)
self.constant_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -1995,12 +1995,12 @@ class CParser(Parser):
# C.g:262:4: ':' constant_expression
self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator614)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_constant_expression_in_struct_declarator616)
self.constant_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -2013,7 +2013,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end struct_declarator
@@ -2181,7 +2181,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 19):
- return
+ return
# C.g:273:2: ( enumerator ( ',' enumerator )* )
# C.g:273:4: enumerator ( ',' enumerator )*
@@ -2189,7 +2189,7 @@ class CParser(Parser):
self.enumerator()
self.following.pop()
if self.failed:
- return
+ return
# C.g:273:15: ( ',' enumerator )*
while True: #loop24
alt24 = 2
@@ -2208,12 +2208,12 @@ class CParser(Parser):
# C.g:273:16: ',' enumerator
self.match(self.input, 27, self.FOLLOW_27_in_enumerator_list680)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_enumerator_in_enumerator_list682)
self.enumerator()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -2233,7 +2233,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end enumerator_list
@@ -2246,13 +2246,13 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 20):
- return
+ return
# C.g:277:2: ( IDENTIFIER ( '=' constant_expression )? )
# C.g:277:4: IDENTIFIER ( '=' constant_expression )?
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enumerator695)
if self.failed:
- return
+ return
# C.g:277:15: ( '=' constant_expression )?
alt25 = 2
LA25_0 = self.input.LA(1)
@@ -2263,12 +2263,12 @@ class CParser(Parser):
# C.g:277:16: '=' constant_expression
self.match(self.input, 28, self.FOLLOW_28_in_enumerator698)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_constant_expression_in_enumerator700)
self.constant_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -2285,7 +2285,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end enumerator
@@ -2298,7 +2298,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 21):
- return
+ return
# C.g:281:2: ( 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' | 'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' | 'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' | 'PACKED' )
# C.g:
@@ -2310,7 +2310,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -2332,7 +2332,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end type_qualifier
@@ -2487,7 +2487,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 23):
- return
+ return
# C.g:303:2: ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ )
alt34 = 2
@@ -2500,7 +2500,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("302:1: direct_declarator : ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ );", 34, 0, self.input)
@@ -2510,7 +2510,7 @@ class CParser(Parser):
# C.g:303:4: IDENTIFIER ( declarator_suffix )*
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_direct_declarator819)
if self.failed:
- return
+ return
# C.g:303:15: ( declarator_suffix )*
while True: #loop31
alt31 = 2
@@ -2754,7 +2754,7 @@ class CParser(Parser):
self.declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -2767,7 +2767,7 @@ class CParser(Parser):
# C.g:304:4: '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+
self.match(self.input, 62, self.FOLLOW_62_in_direct_declarator827)
if self.failed:
- return
+ return
# C.g:304:8: ( 'EFIAPI' )?
alt32 = 2
LA32_0 = self.input.LA(1)
@@ -2781,7 +2781,7 @@ class CParser(Parser):
# C.g:304:9: 'EFIAPI'
self.match(self.input, 58, self.FOLLOW_58_in_direct_declarator830)
if self.failed:
- return
+ return
@@ -2789,10 +2789,10 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_direct_declarator836)
if self.failed:
- return
+ return
# C.g:304:35: ( declarator_suffix )+
cnt33 = 0
while True: #loop33
@@ -3037,7 +3037,7 @@ class CParser(Parser):
self.declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -3046,7 +3046,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(33, self.input)
raise eee
@@ -3066,7 +3066,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end direct_declarator
@@ -3079,7 +3079,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 24):
- return
+ return
# C.g:308:2: ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' )
alt35 = 5
@@ -3095,7 +3095,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 1, self.input)
@@ -3117,7 +3117,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 29, self.input)
@@ -3126,7 +3126,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 2, self.input)
@@ -3135,7 +3135,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 0, self.input)
@@ -3145,65 +3145,65 @@ class CParser(Parser):
# C.g:308:6: '[' constant_expression ']'
self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix852)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_constant_expression_in_declarator_suffix854)
self.constant_expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 65, self.FOLLOW_65_in_declarator_suffix856)
if self.failed:
- return
+ return
elif alt35 == 2:
# C.g:309:9: '[' ']'
self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix866)
if self.failed:
- return
+ return
self.match(self.input, 65, self.FOLLOW_65_in_declarator_suffix868)
if self.failed:
- return
+ return
elif alt35 == 3:
# C.g:310:9: '(' parameter_type_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix878)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_parameter_type_list_in_declarator_suffix880)
self.parameter_type_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix882)
if self.failed:
- return
+ return
elif alt35 == 4:
# C.g:311:9: '(' identifier_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix892)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_identifier_list_in_declarator_suffix894)
self.identifier_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix896)
if self.failed:
- return
+ return
elif alt35 == 5:
# C.g:312:9: '(' ')'
self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix906)
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix908)
if self.failed:
- return
+ return
@@ -3216,7 +3216,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end declarator_suffix
@@ -3229,7 +3229,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 25):
- return
+ return
# C.g:316:2: ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' )
alt38 = 3
@@ -3247,7 +3247,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 2, self.input)
@@ -3263,7 +3263,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 3, self.input)
@@ -3279,7 +3279,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 4, self.input)
@@ -3295,7 +3295,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 5, self.input)
@@ -3313,7 +3313,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 21, self.input)
@@ -3329,7 +3329,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 29, self.input)
@@ -3338,7 +3338,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 1, self.input)
@@ -3347,7 +3347,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 0, self.input)
@@ -3357,7 +3357,7 @@ class CParser(Parser):
# C.g:316:4: '*' ( type_qualifier )+ ( pointer )?
self.match(self.input, 66, self.FOLLOW_66_in_pointer919)
if self.failed:
- return
+ return
# C.g:316:8: ( type_qualifier )+
cnt36 = 0
while True: #loop36
@@ -3405,7 +3405,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -3414,7 +3414,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(36, self.input)
raise eee
@@ -3437,7 +3437,7 @@ class CParser(Parser):
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
@@ -3447,19 +3447,19 @@ class CParser(Parser):
# C.g:317:4: '*' pointer
self.match(self.input, 66, self.FOLLOW_66_in_pointer930)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_pointer_in_pointer932)
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
elif alt38 == 3:
# C.g:318:4: '*'
self.match(self.input, 66, self.FOLLOW_66_in_pointer937)
if self.failed:
- return
+ return
@@ -3472,7 +3472,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end pointer
@@ -3485,7 +3485,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 26):
- return
+ return
# C.g:322:2: ( parameter_list ( ',' ( 'OPTIONAL' )? '...' )? )
# C.g:322:4: parameter_list ( ',' ( 'OPTIONAL' )? '...' )?
@@ -3493,7 +3493,7 @@ class CParser(Parser):
self.parameter_list()
self.following.pop()
if self.failed:
- return
+ return
# C.g:322:19: ( ',' ( 'OPTIONAL' )? '...' )?
alt40 = 2
LA40_0 = self.input.LA(1)
@@ -3504,7 +3504,7 @@ class CParser(Parser):
# C.g:322:20: ',' ( 'OPTIONAL' )? '...'
self.match(self.input, 27, self.FOLLOW_27_in_parameter_type_list951)
if self.failed:
- return
+ return
# C.g:322:24: ( 'OPTIONAL' )?
alt39 = 2
LA39_0 = self.input.LA(1)
@@ -3515,13 +3515,13 @@ class CParser(Parser):
# C.g:322:25: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_parameter_type_list954)
if self.failed:
- return
+ return
self.match(self.input, 67, self.FOLLOW_67_in_parameter_type_list958)
if self.failed:
- return
+ return
@@ -3538,7 +3538,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end parameter_type_list
@@ -3551,7 +3551,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 27):
- return
+ return
# C.g:326:2: ( parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* )
# C.g:326:4: parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )*
@@ -3559,7 +3559,7 @@ class CParser(Parser):
self.parameter_declaration()
self.following.pop()
if self.failed:
- return
+ return
# C.g:326:26: ( ',' ( 'OPTIONAL' )? parameter_declaration )*
while True: #loop42
alt42 = 2
@@ -3585,7 +3585,7 @@ class CParser(Parser):
# C.g:326:27: ',' ( 'OPTIONAL' )? parameter_declaration
self.match(self.input, 27, self.FOLLOW_27_in_parameter_list974)
if self.failed:
- return
+ return
# C.g:326:31: ( 'OPTIONAL' )?
alt41 = 2
LA41_0 = self.input.LA(1)
@@ -3599,7 +3599,7 @@ class CParser(Parser):
# C.g:326:32: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_parameter_list977)
if self.failed:
- return
+ return
@@ -3607,7 +3607,7 @@ class CParser(Parser):
self.parameter_declaration()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -3627,7 +3627,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end parameter_list
@@ -3640,7 +3640,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 28):
- return
+ return
# C.g:330:2: ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER )
alt46 = 2
@@ -3657,7 +3657,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 13, self.input)
@@ -3668,7 +3668,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 0, self.input)
@@ -3680,7 +3680,7 @@ class CParser(Parser):
self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
# C.g:330:27: ( declarator | abstract_declarator )*
while True: #loop43
alt43 = 3
@@ -3764,7 +3764,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
elif alt43 == 2:
@@ -3773,7 +3773,7 @@ class CParser(Parser):
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -3790,7 +3790,7 @@ class CParser(Parser):
# C.g:330:62: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_parameter_declaration1004)
if self.failed:
- return
+ return
@@ -3813,7 +3813,7 @@ class CParser(Parser):
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -3822,7 +3822,7 @@ class CParser(Parser):
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_parameter_declaration1016)
if self.failed:
- return
+ return
@@ -3835,7 +3835,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end parameter_declaration
@@ -3848,13 +3848,13 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 29):
- return
+ return
# C.g:336:2: ( IDENTIFIER ( ',' IDENTIFIER )* )
# C.g:336:4: IDENTIFIER ( ',' IDENTIFIER )*
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1027)
if self.failed:
- return
+ return
# C.g:337:2: ( ',' IDENTIFIER )*
while True: #loop47
alt47 = 2
@@ -3868,10 +3868,10 @@ class CParser(Parser):
# C.g:337:3: ',' IDENTIFIER
self.match(self.input, 27, self.FOLLOW_27_in_identifier_list1031)
if self.failed:
- return
+ return
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1033)
if self.failed:
- return
+ return
else:
@@ -3891,7 +3891,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end identifier_list
@@ -3904,7 +3904,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 30):
- return
+ return
# C.g:341:2: ( specifier_qualifier_list ( abstract_declarator )? | type_id )
alt49 = 2
@@ -3922,7 +3922,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 13, self.input)
@@ -3931,7 +3931,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 0, self.input)
@@ -3943,7 +3943,7 @@ class CParser(Parser):
self.specifier_qualifier_list()
self.following.pop()
if self.failed:
- return
+ return
# C.g:341:29: ( abstract_declarator )?
alt48 = 2
LA48_0 = self.input.LA(1)
@@ -3956,7 +3956,7 @@ class CParser(Parser):
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
@@ -3968,7 +3968,7 @@ class CParser(Parser):
self.type_id()
self.following.pop()
if self.failed:
- return
+ return
@@ -3981,7 +3981,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end type_name
@@ -3994,7 +3994,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 31):
- return
+ return
# C.g:346:2: ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator )
alt51 = 2
@@ -4007,7 +4007,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("345:1: abstract_declarator : ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator );", 51, 0, self.input)
@@ -4019,7 +4019,7 @@ class CParser(Parser):
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
# C.g:346:12: ( direct_abstract_declarator )?
alt50 = 2
LA50_0 = self.input.LA(1)
@@ -4204,7 +4204,7 @@ class CParser(Parser):
self.direct_abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
@@ -4216,7 +4216,7 @@ class CParser(Parser):
self.direct_abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
@@ -4229,7 +4229,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end abstract_declarator
@@ -4242,7 +4242,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 32):
- return
+ return
# C.g:351:2: ( ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* )
# C.g:351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )*
@@ -4264,7 +4264,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 18, self.input)
@@ -4275,7 +4275,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 1, self.input)
@@ -4286,7 +4286,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 0, self.input)
@@ -4296,15 +4296,15 @@ class CParser(Parser):
# C.g:351:6: '(' abstract_declarator ')'
self.match(self.input, 62, self.FOLLOW_62_in_direct_abstract_declarator1086)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_abstract_declarator_in_direct_abstract_declarator1088)
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_direct_abstract_declarator1090)
if self.failed:
- return
+ return
elif alt52 == 2:
@@ -4313,7 +4313,7 @@ class CParser(Parser):
self.abstract_declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
@@ -4560,7 +4560,7 @@ class CParser(Parser):
self.abstract_declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -4580,7 +4580,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end direct_abstract_declarator
@@ -4593,7 +4593,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 33):
- return
+ return
# C.g:355:2: ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' )
alt54 = 4
@@ -4609,7 +4609,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 1, self.input)
@@ -4625,7 +4625,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 2, self.input)
@@ -4634,7 +4634,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 0, self.input)
@@ -4644,50 +4644,50 @@ class CParser(Parser):
# C.g:355:4: '[' ']'
self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1110)
if self.failed:
- return
+ return
self.match(self.input, 65, self.FOLLOW_65_in_abstract_declarator_suffix1112)
if self.failed:
- return
+ return
elif alt54 == 2:
# C.g:356:4: '[' constant_expression ']'
self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1117)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_constant_expression_in_abstract_declarator_suffix1119)
self.constant_expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 65, self.FOLLOW_65_in_abstract_declarator_suffix1121)
if self.failed:
- return
+ return
elif alt54 == 3:
# C.g:357:4: '(' ')'
self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1126)
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1128)
if self.failed:
- return
+ return
elif alt54 == 4:
# C.g:358:4: '(' parameter_type_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1133)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_parameter_type_list_in_abstract_declarator_suffix1135)
self.parameter_type_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1137)
if self.failed:
- return
+ return
@@ -4700,7 +4700,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end abstract_declarator_suffix
@@ -4713,7 +4713,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 34):
- return
+ return
# C.g:363:2: ( assignment_expression | '{' initializer_list ( ',' )? '}' )
alt56 = 2
@@ -4726,7 +4726,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("361:1: initializer : ( assignment_expression | '{' initializer_list ( ',' )? '}' );", 56, 0, self.input)
@@ -4738,19 +4738,19 @@ class CParser(Parser):
self.assignment_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt56 == 2:
# C.g:364:4: '{' initializer_list ( ',' )? '}'
self.match(self.input, 43, self.FOLLOW_43_in_initializer1155)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_initializer_list_in_initializer1157)
self.initializer_list()
self.following.pop()
if self.failed:
- return
+ return
# C.g:364:25: ( ',' )?
alt55 = 2
LA55_0 = self.input.LA(1)
@@ -4761,13 +4761,13 @@ class CParser(Parser):
# C.g:0:0: ','
self.match(self.input, 27, self.FOLLOW_27_in_initializer1159)
if self.failed:
- return
+ return
self.match(self.input, 44, self.FOLLOW_44_in_initializer1162)
if self.failed:
- return
+ return
@@ -4780,7 +4780,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end initializer
@@ -4793,7 +4793,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 35):
- return
+ return
# C.g:368:2: ( initializer ( ',' initializer )* )
# C.g:368:4: initializer ( ',' initializer )*
@@ -4801,7 +4801,7 @@ class CParser(Parser):
self.initializer()
self.following.pop()
if self.failed:
- return
+ return
# C.g:368:16: ( ',' initializer )*
while True: #loop57
alt57 = 2
@@ -4820,12 +4820,12 @@ class CParser(Parser):
# C.g:368:17: ',' initializer
self.match(self.input, 27, self.FOLLOW_27_in_initializer_list1176)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_initializer_in_initializer_list1178)
self.initializer()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -4845,7 +4845,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end initializer_list
@@ -4956,7 +4956,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 37):
- return
+ return
# C.g:378:2: ( ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )* )
# C.g:378:4: ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )*
@@ -4966,7 +4966,7 @@ class CParser(Parser):
self.multiplicative_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -4985,24 +4985,24 @@ class CParser(Parser):
# C.g:378:33: '+' multiplicative_expression
self.match(self.input, 68, self.FOLLOW_68_in_additive_expression1229)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1231)
self.multiplicative_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt61 == 2:
# C.g:378:65: '-' multiplicative_expression
self.match(self.input, 69, self.FOLLOW_69_in_additive_expression1235)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1237)
self.multiplicative_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -5022,7 +5022,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end additive_expression
@@ -5035,7 +5035,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 38):
- return
+ return
# C.g:382:2: ( ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )* )
# C.g:382:4: ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )*
@@ -5045,7 +5045,7 @@ class CParser(Parser):
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -5064,36 +5064,36 @@ class CParser(Parser):
# C.g:382:23: '*' cast_expression
self.match(self.input, 66, self.FOLLOW_66_in_multiplicative_expression1255)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1257)
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt62 == 2:
# C.g:382:45: '/' cast_expression
self.match(self.input, 70, self.FOLLOW_70_in_multiplicative_expression1261)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1263)
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt62 == 3:
# C.g:382:67: '%' cast_expression
self.match(self.input, 71, self.FOLLOW_71_in_multiplicative_expression1267)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1269)
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -5113,7 +5113,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end multiplicative_expression
@@ -5126,7 +5126,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 39):
- return
+ return
# C.g:386:2: ( '(' type_name ')' cast_expression | unary_expression )
alt63 = 2
@@ -5146,7 +5146,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 25, self.input)
@@ -5157,7 +5157,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 1, self.input)
@@ -5168,7 +5168,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 0, self.input)
@@ -5178,20 +5178,20 @@ class CParser(Parser):
# C.g:386:4: '(' type_name ')' cast_expression
self.match(self.input, 62, self.FOLLOW_62_in_cast_expression1282)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_type_name_in_cast_expression1284)
self.type_name()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_cast_expression1286)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_cast_expression_in_cast_expression1288)
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt63 == 2:
@@ -5200,7 +5200,7 @@ class CParser(Parser):
self.unary_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -5213,7 +5213,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end cast_expression
@@ -5226,7 +5226,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 40):
- return
+ return
# C.g:391:2: ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' )
alt64 = 6
@@ -5252,7 +5252,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 13, self.input)
@@ -5263,7 +5263,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 12, self.input)
@@ -5272,7 +5272,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 0, self.input)
@@ -5284,31 +5284,31 @@ class CParser(Parser):
self.postfix_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt64 == 2:
# C.g:392:4: '++' unary_expression
self.match(self.input, 72, self.FOLLOW_72_in_unary_expression1309)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_unary_expression_in_unary_expression1311)
self.unary_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt64 == 3:
# C.g:393:4: '--' unary_expression
self.match(self.input, 73, self.FOLLOW_73_in_unary_expression1316)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_unary_expression_in_unary_expression1318)
self.unary_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt64 == 4:
@@ -5317,42 +5317,42 @@ class CParser(Parser):
self.unary_operator()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_cast_expression_in_unary_expression1325)
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt64 == 5:
# C.g:395:4: 'sizeof' unary_expression
self.match(self.input, 74, self.FOLLOW_74_in_unary_expression1330)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_unary_expression_in_unary_expression1332)
self.unary_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt64 == 6:
# C.g:396:4: 'sizeof' '(' type_name ')'
self.match(self.input, 74, self.FOLLOW_74_in_unary_expression1337)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_unary_expression1339)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_type_name_in_unary_expression1341)
self.type_name()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_unary_expression1343)
if self.failed:
- return
+ return
@@ -5365,7 +5365,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end unary_expression
@@ -5385,13 +5385,13 @@ class CParser(Parser):
c = None
-
+
self.postfix_expression_stack[-1].FuncCallText = ''
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 41):
- return
+ return
# C.g:406:2: (p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* )
# C.g:406:6: p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
@@ -5399,7 +5399,7 @@ class CParser(Parser):
p = self.primary_expression()
self.following.pop()
if self.failed:
- return
+ return
if self.backtracking == 0:
self.postfix_expression_stack[-1].FuncCallText += self.input.toString(p.start, p.stop)
@@ -5461,26 +5461,26 @@ class CParser(Parser):
# C.g:407:13: '[' expression ']'
self.match(self.input, 64, self.FOLLOW_64_in_postfix_expression1383)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_postfix_expression1385)
self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 65, self.FOLLOW_65_in_postfix_expression1387)
if self.failed:
- return
+ return
elif alt65 == 2:
# C.g:408:13: '(' a= ')'
self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1401)
if self.failed:
- return
+ return
a = self.input.LT(1)
self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1405)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, a.line, a.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, '')
@@ -5490,16 +5490,16 @@ class CParser(Parser):
# C.g:409:13: '(' c= argument_expression_list b= ')'
self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1420)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_argument_expression_list_in_postfix_expression1424)
c = self.argument_expression_list()
self.following.pop()
if self.failed:
- return
+ return
b = self.input.LT(1)
self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1428)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, b.line, b.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, self.input.toString(c.start, c.stop))
@@ -5509,26 +5509,26 @@ class CParser(Parser):
# C.g:410:13: '(' macro_parameter_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1444)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_macro_parameter_list_in_postfix_expression1446)
self.macro_parameter_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1448)
if self.failed:
- return
+ return
elif alt65 == 5:
# C.g:411:13: '.' x= IDENTIFIER
self.match(self.input, 75, self.FOLLOW_75_in_postfix_expression1462)
if self.failed:
- return
+ return
x = self.input.LT(1)
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1466)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.postfix_expression_stack[-1].FuncCallText += '.' + x.text
@@ -5538,11 +5538,11 @@ class CParser(Parser):
# C.g:412:13: '*' y= IDENTIFIER
self.match(self.input, 66, self.FOLLOW_66_in_postfix_expression1482)
if self.failed:
- return
+ return
y = self.input.LT(1)
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1486)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.postfix_expression_stack[-1].FuncCallText = y.text
@@ -5552,11 +5552,11 @@ class CParser(Parser):
# C.g:413:13: '->' z= IDENTIFIER
self.match(self.input, 76, self.FOLLOW_76_in_postfix_expression1502)
if self.failed:
- return
+ return
z = self.input.LT(1)
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1506)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.postfix_expression_stack[-1].FuncCallText += '->' + z.text
@@ -5566,14 +5566,14 @@ class CParser(Parser):
# C.g:414:13: '++'
self.match(self.input, 72, self.FOLLOW_72_in_postfix_expression1522)
if self.failed:
- return
+ return
elif alt65 == 9:
# C.g:415:13: '--'
self.match(self.input, 73, self.FOLLOW_73_in_postfix_expression1536)
if self.failed:
- return
+ return
else:
@@ -5594,7 +5594,7 @@ class CParser(Parser):
self.postfix_expression_stack.pop()
pass
- return
+ return
# $ANTLR end postfix_expression
@@ -5607,7 +5607,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 42):
- return
+ return
# C.g:420:2: ( parameter_declaration ( ',' parameter_declaration )* )
# C.g:420:4: parameter_declaration ( ',' parameter_declaration )*
@@ -5615,7 +5615,7 @@ class CParser(Parser):
self.parameter_declaration()
self.following.pop()
if self.failed:
- return
+ return
# C.g:420:26: ( ',' parameter_declaration )*
while True: #loop66
alt66 = 2
@@ -5629,12 +5629,12 @@ class CParser(Parser):
# C.g:420:27: ',' parameter_declaration
self.match(self.input, 27, self.FOLLOW_27_in_macro_parameter_list1562)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_parameter_declaration_in_macro_parameter_list1564)
self.parameter_declaration()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -5654,7 +5654,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end macro_parameter_list
@@ -5667,7 +5667,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 43):
- return
+ return
# C.g:424:2: ( '&' | '*' | '+' | '-' | '~' | '!' )
# C.g:
@@ -5679,7 +5679,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -5701,7 +5701,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end unary_operator
@@ -5812,7 +5812,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 45):
- return
+ return
# C.g:439:5: ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL )
alt72 = 6
@@ -5832,7 +5832,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("438:1: constant : ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL );", 72, 0, self.input)
@@ -5842,28 +5842,28 @@ class CParser(Parser):
# C.g:439:9: HEX_LITERAL
self.match(self.input, HEX_LITERAL, self.FOLLOW_HEX_LITERAL_in_constant1643)
if self.failed:
- return
+ return
elif alt72 == 2:
# C.g:440:9: OCTAL_LITERAL
self.match(self.input, OCTAL_LITERAL, self.FOLLOW_OCTAL_LITERAL_in_constant1653)
if self.failed:
- return
+ return
elif alt72 == 3:
# C.g:441:9: DECIMAL_LITERAL
self.match(self.input, DECIMAL_LITERAL, self.FOLLOW_DECIMAL_LITERAL_in_constant1663)
if self.failed:
- return
+ return
elif alt72 == 4:
# C.g:442:7: CHARACTER_LITERAL
self.match(self.input, CHARACTER_LITERAL, self.FOLLOW_CHARACTER_LITERAL_in_constant1671)
if self.failed:
- return
+ return
elif alt72 == 5:
@@ -5907,7 +5907,7 @@ class CParser(Parser):
# C.g:0:0: IDENTIFIER
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1680)
if self.failed:
- return
+ return
else:
@@ -5933,7 +5933,7 @@ class CParser(Parser):
# C.g:0:0: STRING_LITERAL
self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_constant1683)
if self.failed:
- return
+ return
else:
@@ -5942,7 +5942,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(69, self.input)
raise eee
@@ -5958,7 +5958,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(70, self.input)
raise eee
@@ -5979,7 +5979,7 @@ class CParser(Parser):
# C.g:0:0: IDENTIFIER
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1688)
if self.failed:
- return
+ return
else:
@@ -5992,7 +5992,7 @@ class CParser(Parser):
# C.g:444:9: FLOATING_POINT_LITERAL
self.match(self.input, FLOATING_POINT_LITERAL, self.FOLLOW_FLOATING_POINT_LITERAL_in_constant1699)
if self.failed:
- return
+ return
@@ -6005,7 +6005,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end constant
@@ -6088,7 +6088,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 47):
- return
+ return
# C.g:454:2: ( conditional_expression )
# C.g:454:4: conditional_expression
@@ -6096,7 +6096,7 @@ class CParser(Parser):
self.conditional_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -6110,7 +6110,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end constant_expression
@@ -6123,7 +6123,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 48):
- return
+ return
# C.g:458:2: ( lvalue assignment_operator assignment_expression | conditional_expression )
alt74 = 2
@@ -6140,7 +6140,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 13, self.input)
@@ -6156,7 +6156,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 14, self.input)
@@ -6172,7 +6172,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 15, self.input)
@@ -6188,7 +6188,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 16, self.input)
@@ -6204,7 +6204,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 17, self.input)
@@ -6220,7 +6220,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 18, self.input)
@@ -6236,7 +6236,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 19, self.input)
@@ -6254,7 +6254,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 21, self.input)
@@ -6270,7 +6270,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 22, self.input)
@@ -6281,7 +6281,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 1, self.input)
@@ -6299,7 +6299,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 44, self.input)
@@ -6315,7 +6315,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 45, self.input)
@@ -6331,7 +6331,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 46, self.input)
@@ -6347,7 +6347,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 47, self.input)
@@ -6363,7 +6363,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 48, self.input)
@@ -6379,7 +6379,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 49, self.input)
@@ -6395,7 +6395,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 50, self.input)
@@ -6408,7 +6408,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 2, self.input)
@@ -6426,7 +6426,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 73, self.input)
@@ -6442,7 +6442,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 74, self.input)
@@ -6458,7 +6458,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 75, self.input)
@@ -6474,7 +6474,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 76, self.input)
@@ -6490,7 +6490,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 77, self.input)
@@ -6506,7 +6506,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 78, self.input)
@@ -6522,7 +6522,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 79, self.input)
@@ -6535,7 +6535,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 3, self.input)
@@ -6553,7 +6553,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 102, self.input)
@@ -6569,7 +6569,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 103, self.input)
@@ -6585,7 +6585,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 104, self.input)
@@ -6601,7 +6601,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 105, self.input)
@@ -6617,7 +6617,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 106, self.input)
@@ -6633,7 +6633,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 107, self.input)
@@ -6649,7 +6649,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 108, self.input)
@@ -6662,7 +6662,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 4, self.input)
@@ -6680,7 +6680,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 131, self.input)
@@ -6696,7 +6696,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 132, self.input)
@@ -6712,7 +6712,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 133, self.input)
@@ -6728,7 +6728,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 134, self.input)
@@ -6744,7 +6744,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 135, self.input)
@@ -6760,7 +6760,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 136, self.input)
@@ -6776,7 +6776,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 137, self.input)
@@ -6789,7 +6789,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 5, self.input)
@@ -6807,7 +6807,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 160, self.input)
@@ -6823,7 +6823,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 161, self.input)
@@ -6839,7 +6839,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 162, self.input)
@@ -6855,7 +6855,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 163, self.input)
@@ -6871,7 +6871,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 164, self.input)
@@ -6887,7 +6887,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 165, self.input)
@@ -6903,7 +6903,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 166, self.input)
@@ -6919,7 +6919,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 167, self.input)
@@ -6937,7 +6937,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 189, self.input)
@@ -6948,7 +6948,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 6, self.input)
@@ -6966,7 +6966,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 191, self.input)
@@ -6982,7 +6982,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 192, self.input)
@@ -6998,7 +6998,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 193, self.input)
@@ -7014,7 +7014,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 194, self.input)
@@ -7030,7 +7030,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 195, self.input)
@@ -7046,7 +7046,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 196, self.input)
@@ -7062,7 +7062,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 197, self.input)
@@ -7075,7 +7075,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 7, self.input)
@@ -7093,7 +7093,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 220, self.input)
@@ -7109,7 +7109,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 221, self.input)
@@ -7125,7 +7125,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 222, self.input)
@@ -7141,7 +7141,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 223, self.input)
@@ -7157,7 +7157,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 224, self.input)
@@ -7173,7 +7173,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 225, self.input)
@@ -7189,7 +7189,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 226, self.input)
@@ -7205,7 +7205,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 227, self.input)
@@ -7221,7 +7221,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 228, self.input)
@@ -7237,7 +7237,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 229, self.input)
@@ -7253,7 +7253,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 230, self.input)
@@ -7269,7 +7269,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 231, self.input)
@@ -7280,7 +7280,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 8, self.input)
@@ -7298,7 +7298,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 244, self.input)
@@ -7314,7 +7314,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 245, self.input)
@@ -7330,7 +7330,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 246, self.input)
@@ -7346,7 +7346,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 247, self.input)
@@ -7362,7 +7362,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 248, self.input)
@@ -7378,7 +7378,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 249, self.input)
@@ -7394,7 +7394,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 250, self.input)
@@ -7410,7 +7410,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 251, self.input)
@@ -7426,7 +7426,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 252, self.input)
@@ -7442,7 +7442,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 253, self.input)
@@ -7458,7 +7458,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 254, self.input)
@@ -7474,7 +7474,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 255, self.input)
@@ -7483,7 +7483,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 9, self.input)
@@ -7501,7 +7501,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 256, self.input)
@@ -7517,7 +7517,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 257, self.input)
@@ -7533,7 +7533,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 258, self.input)
@@ -7549,7 +7549,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 259, self.input)
@@ -7565,7 +7565,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 260, self.input)
@@ -7581,7 +7581,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 261, self.input)
@@ -7597,7 +7597,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 262, self.input)
@@ -7613,7 +7613,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 263, self.input)
@@ -7629,7 +7629,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 264, self.input)
@@ -7645,7 +7645,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 265, self.input)
@@ -7661,7 +7661,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 266, self.input)
@@ -7677,7 +7677,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 267, self.input)
@@ -7686,7 +7686,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 10, self.input)
@@ -7704,7 +7704,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 268, self.input)
@@ -7720,7 +7720,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 269, self.input)
@@ -7736,7 +7736,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 270, self.input)
@@ -7752,7 +7752,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 271, self.input)
@@ -7768,7 +7768,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 272, self.input)
@@ -7784,7 +7784,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 273, self.input)
@@ -7800,7 +7800,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 274, self.input)
@@ -7816,7 +7816,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 275, self.input)
@@ -7832,7 +7832,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 276, self.input)
@@ -7848,7 +7848,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 277, self.input)
@@ -7864,7 +7864,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 278, self.input)
@@ -7880,7 +7880,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 279, self.input)
@@ -7889,7 +7889,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 11, self.input)
@@ -7907,7 +7907,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 280, self.input)
@@ -7923,7 +7923,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 281, self.input)
@@ -7939,7 +7939,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 282, self.input)
@@ -7955,7 +7955,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 283, self.input)
@@ -7971,7 +7971,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 284, self.input)
@@ -7987,7 +7987,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 285, self.input)
@@ -8003,7 +8003,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 286, self.input)
@@ -8019,7 +8019,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 287, self.input)
@@ -8035,7 +8035,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 288, self.input)
@@ -8051,7 +8051,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 289, self.input)
@@ -8067,7 +8067,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 290, self.input)
@@ -8083,7 +8083,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 291, self.input)
@@ -8092,7 +8092,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 12, self.input)
@@ -8101,7 +8101,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 0, self.input)
@@ -8113,17 +8113,17 @@ class CParser(Parser):
self.lvalue()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_assignment_operator_in_assignment_expression1746)
self.assignment_operator()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_assignment_expression_in_assignment_expression1748)
self.assignment_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt74 == 2:
@@ -8132,7 +8132,7 @@ class CParser(Parser):
self.conditional_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -8145,7 +8145,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end assignment_expression
@@ -8158,7 +8158,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 49):
- return
+ return
# C.g:463:2: ( unary_expression )
# C.g:463:4: unary_expression
@@ -8166,7 +8166,7 @@ class CParser(Parser):
self.unary_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -8180,7 +8180,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end lvalue
@@ -8193,7 +8193,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 50):
- return
+ return
# C.g:467:2: ( '=' | '*=' | '/=' | '%=' | '+=' | '-=' | '<<=' | '>>=' | '&=' | '^=' | '|=' )
# C.g:
@@ -8205,7 +8205,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -8227,7 +8227,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end assignment_operator
@@ -8243,7 +8243,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 51):
- return
+ return
# C.g:481:2: (e= logical_or_expression ( '?' expression ':' conditional_expression )? )
# C.g:481:4: e= logical_or_expression ( '?' expression ':' conditional_expression )?
@@ -8251,7 +8251,7 @@ class CParser(Parser):
e = self.logical_or_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:481:28: ( '?' expression ':' conditional_expression )?
alt75 = 2
LA75_0 = self.input.LA(1)
@@ -8262,20 +8262,20 @@ class CParser(Parser):
# C.g:481:29: '?' expression ':' conditional_expression
self.match(self.input, 90, self.FOLLOW_90_in_conditional_expression1842)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_conditional_expression1844)
self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 47, self.FOLLOW_47_in_conditional_expression1846)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_conditional_expression_in_conditional_expression1848)
self.conditional_expression()
self.following.pop()
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@@ -8295,7 +8295,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end conditional_expression
@@ -8378,7 +8378,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 53):
- return
+ return
# C.g:489:2: ( inclusive_or_expression ( '&&' inclusive_or_expression )* )
# C.g:489:4: inclusive_or_expression ( '&&' inclusive_or_expression )*
@@ -8386,7 +8386,7 @@ class CParser(Parser):
self.inclusive_or_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:489:28: ( '&&' inclusive_or_expression )*
while True: #loop77
alt77 = 2
@@ -8400,12 +8400,12 @@ class CParser(Parser):
# C.g:489:29: '&&' inclusive_or_expression
self.match(self.input, 92, self.FOLLOW_92_in_logical_and_expression1884)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_inclusive_or_expression_in_logical_and_expression1886)
self.inclusive_or_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8425,7 +8425,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end logical_and_expression
@@ -8438,7 +8438,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 54):
- return
+ return
# C.g:493:2: ( exclusive_or_expression ( '|' exclusive_or_expression )* )
# C.g:493:4: exclusive_or_expression ( '|' exclusive_or_expression )*
@@ -8446,7 +8446,7 @@ class CParser(Parser):
self.exclusive_or_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:493:28: ( '|' exclusive_or_expression )*
while True: #loop78
alt78 = 2
@@ -8460,12 +8460,12 @@ class CParser(Parser):
# C.g:493:29: '|' exclusive_or_expression
self.match(self.input, 93, self.FOLLOW_93_in_inclusive_or_expression1902)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_exclusive_or_expression_in_inclusive_or_expression1904)
self.exclusive_or_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8485,7 +8485,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end inclusive_or_expression
@@ -8498,7 +8498,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 55):
- return
+ return
# C.g:497:2: ( and_expression ( '^' and_expression )* )
# C.g:497:4: and_expression ( '^' and_expression )*
@@ -8506,7 +8506,7 @@ class CParser(Parser):
self.and_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:497:19: ( '^' and_expression )*
while True: #loop79
alt79 = 2
@@ -8520,12 +8520,12 @@ class CParser(Parser):
# C.g:497:20: '^' and_expression
self.match(self.input, 94, self.FOLLOW_94_in_exclusive_or_expression1920)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_and_expression_in_exclusive_or_expression1922)
self.and_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8545,7 +8545,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end exclusive_or_expression
@@ -8558,7 +8558,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 56):
- return
+ return
# C.g:501:2: ( equality_expression ( '&' equality_expression )* )
# C.g:501:4: equality_expression ( '&' equality_expression )*
@@ -8566,7 +8566,7 @@ class CParser(Parser):
self.equality_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:501:24: ( '&' equality_expression )*
while True: #loop80
alt80 = 2
@@ -8580,12 +8580,12 @@ class CParser(Parser):
# C.g:501:25: '&' equality_expression
self.match(self.input, 77, self.FOLLOW_77_in_and_expression1938)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_equality_expression_in_and_expression1940)
self.equality_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8605,7 +8605,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end and_expression
@@ -8618,7 +8618,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 57):
- return
+ return
# C.g:504:2: ( relational_expression ( ( '==' | '!=' ) relational_expression )* )
# C.g:504:4: relational_expression ( ( '==' | '!=' ) relational_expression )*
@@ -8626,7 +8626,7 @@ class CParser(Parser):
self.relational_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:504:26: ( ( '==' | '!=' ) relational_expression )*
while True: #loop81
alt81 = 2
@@ -8646,7 +8646,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -8659,7 +8659,7 @@ class CParser(Parser):
self.relational_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8679,7 +8679,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end equality_expression
@@ -8692,7 +8692,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 58):
- return
+ return
# C.g:508:2: ( shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* )
# C.g:508:4: shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*
@@ -8700,7 +8700,7 @@ class CParser(Parser):
self.shift_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:508:21: ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*
while True: #loop82
alt82 = 2
@@ -8720,7 +8720,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -8733,7 +8733,7 @@ class CParser(Parser):
self.shift_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8753,7 +8753,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end relational_expression
@@ -8766,7 +8766,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 59):
- return
+ return
# C.g:512:2: ( additive_expression ( ( '<<' | '>>' ) additive_expression )* )
# C.g:512:4: additive_expression ( ( '<<' | '>>' ) additive_expression )*
@@ -8774,7 +8774,7 @@ class CParser(Parser):
self.additive_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:512:24: ( ( '<<' | '>>' ) additive_expression )*
while True: #loop83
alt83 = 2
@@ -8794,7 +8794,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -8807,7 +8807,7 @@ class CParser(Parser):
self.additive_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8827,7 +8827,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end shift_expression
@@ -8840,7 +8840,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 60):
- return
+ return
# C.g:518:2: ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration )
alt84 = 11
@@ -8861,7 +8861,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 43, self.input)
@@ -8881,7 +8881,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 47, self.input)
@@ -8897,7 +8897,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 53, self.input)
@@ -8913,7 +8913,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 68, self.input)
@@ -8924,7 +8924,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 1, self.input)
@@ -8953,7 +8953,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 0, self.input)
@@ -8965,7 +8965,7 @@ class CParser(Parser):
self.labeled_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 2:
@@ -8974,7 +8974,7 @@ class CParser(Parser):
self.compound_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 3:
@@ -8983,7 +8983,7 @@ class CParser(Parser):
self.expression_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 4:
@@ -8992,7 +8992,7 @@ class CParser(Parser):
self.selection_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 5:
@@ -9001,7 +9001,7 @@ class CParser(Parser):
self.iteration_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 6:
@@ -9010,7 +9010,7 @@ class CParser(Parser):
self.jump_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 7:
@@ -9019,7 +9019,7 @@ class CParser(Parser):
self.macro_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 8:
@@ -9028,7 +9028,7 @@ class CParser(Parser):
self.asm2_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 9:
@@ -9037,7 +9037,7 @@ class CParser(Parser):
self.asm1_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 10:
@@ -9046,7 +9046,7 @@ class CParser(Parser):
self.asm_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 11:
@@ -9055,7 +9055,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
@@ -9068,7 +9068,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end statement
@@ -9081,7 +9081,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 61):
- return
+ return
# C.g:532:2: ( ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';' )
# C.g:532:4: ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';'
@@ -9095,16 +9095,16 @@ class CParser(Parser):
# C.g:0:0: '__asm__'
self.match(self.input, 103, self.FOLLOW_103_in_asm2_statement2086)
if self.failed:
- return
+ return
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_asm2_statement2089)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_asm2_statement2091)
if self.failed:
- return
+ return
# C.g:532:30: (~ ( ';' ) )*
while True: #loop86
alt86 = 2
@@ -9131,7 +9131,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -9148,10 +9148,10 @@ class CParser(Parser):
self.match(self.input, 63, self.FOLLOW_63_in_asm2_statement2101)
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_asm2_statement2103)
if self.failed:
- return
+ return
@@ -9165,7 +9165,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end asm2_statement
@@ -9178,16 +9178,16 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 62):
- return
+ return
# C.g:536:2: ( '_asm' '{' (~ ( '}' ) )* '}' )
# C.g:536:4: '_asm' '{' (~ ( '}' ) )* '}'
self.match(self.input, 104, self.FOLLOW_104_in_asm1_statement2115)
if self.failed:
- return
+ return
self.match(self.input, 43, self.FOLLOW_43_in_asm1_statement2117)
if self.failed:
- return
+ return
# C.g:536:15: (~ ( '}' ) )*
while True: #loop87
alt87 = 2
@@ -9207,7 +9207,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -9224,7 +9224,7 @@ class CParser(Parser):
self.match(self.input, 44, self.FOLLOW_44_in_asm1_statement2127)
if self.failed:
- return
+ return
@@ -9238,7 +9238,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end asm1_statement
@@ -9251,16 +9251,16 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 63):
- return
+ return
# C.g:540:2: ( '__asm' '{' (~ ( '}' ) )* '}' )
# C.g:540:4: '__asm' '{' (~ ( '}' ) )* '}'
self.match(self.input, 105, self.FOLLOW_105_in_asm_statement2138)
if self.failed:
- return
+ return
self.match(self.input, 43, self.FOLLOW_43_in_asm_statement2140)
if self.failed:
- return
+ return
# C.g:540:16: (~ ( '}' ) )*
while True: #loop88
alt88 = 2
@@ -9280,7 +9280,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -9297,7 +9297,7 @@ class CParser(Parser):
self.match(self.input, 44, self.FOLLOW_44_in_asm_statement2150)
if self.failed:
- return
+ return
@@ -9311,7 +9311,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end asm_statement
@@ -9324,16 +9324,16 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 64):
- return
+ return
# C.g:544:2: ( IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')' )
# C.g:544:4: IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')'
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_macro_statement2162)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_macro_statement2164)
if self.failed:
- return
+ return
# C.g:544:19: ( declaration )*
while True: #loop89
alt89 = 2
@@ -11235,7 +11235,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -12441,7 +12441,7 @@ class CParser(Parser):
self.statement_list()
self.following.pop()
if self.failed:
- return
+ return
@@ -12457,13 +12457,13 @@ class CParser(Parser):
self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_macro_statement2176)
if self.failed:
- return
+ return
@@ -12477,7 +12477,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end macro_statement
@@ -12490,7 +12490,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 65):
- return
+ return
# C.g:548:2: ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement )
alt92 = 3
@@ -12504,7 +12504,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("547:1: labeled_statement : ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement );", 92, 0, self.input)
@@ -12514,50 +12514,50 @@ class CParser(Parser):
# C.g:548:4: IDENTIFIER ':' statement
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_labeled_statement2188)
if self.failed:
- return
+ return
self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2190)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_labeled_statement2192)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt92 == 2:
# C.g:549:4: 'case' constant_expression ':' statement
self.match(self.input, 106, self.FOLLOW_106_in_labeled_statement2197)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_constant_expression_in_labeled_statement2199)
self.constant_expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2201)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_labeled_statement2203)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt92 == 3:
# C.g:550:4: 'default' ':' statement
self.match(self.input, 107, self.FOLLOW_107_in_labeled_statement2208)
if self.failed:
- return
+ return
self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2210)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_labeled_statement2212)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
@@ -12570,7 +12570,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end labeled_statement
@@ -14553,7 +14553,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 67):
- return
+ return
# C.g:558:2: ( ( statement )+ )
# C.g:558:4: ( statement )+
@@ -16231,7 +16231,7 @@ class CParser(Parser):
self.statement()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -16240,7 +16240,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(95, self.input)
raise eee
@@ -16261,7 +16261,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end statement_list
@@ -16348,7 +16348,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 69):
- return
+ return
# C.g:567:2: ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement )
alt98 = 2
@@ -16361,7 +16361,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("566:1: selection_statement : ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement );", 98, 0, self.input)
@@ -16371,18 +16371,18 @@ class CParser(Parser):
# C.g:567:4: 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )?
self.match(self.input, 108, self.FOLLOW_108_in_selection_statement2272)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2274)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_selection_statement2278)
e = self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_selection_statement2280)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@@ -16390,7 +16390,7 @@ class CParser(Parser):
self.statement()
self.following.pop()
if self.failed:
- return
+ return
# C.g:567:167: ( options {k=1; backtrack=false; } : 'else' statement )?
alt97 = 2
LA97_0 = self.input.LA(1)
@@ -16401,12 +16401,12 @@ class CParser(Parser):
# C.g:567:200: 'else' statement
self.match(self.input, 109, self.FOLLOW_109_in_selection_statement2299)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_selection_statement2301)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
@@ -16416,23 +16416,23 @@ class CParser(Parser):
# C.g:568:4: 'switch' '(' expression ')' statement
self.match(self.input, 110, self.FOLLOW_110_in_selection_statement2308)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2310)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_selection_statement2312)
self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_selection_statement2314)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_selection_statement2316)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
@@ -16445,7 +16445,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end selection_statement
@@ -16461,7 +16461,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 70):
- return
+ return
# C.g:572:2: ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement )
alt100 = 3
@@ -16475,7 +16475,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("571:1: iteration_statement : ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement );", 100, 0, self.input)
@@ -16485,23 +16485,23 @@ class CParser(Parser):
# C.g:572:4: 'while' '(' e= expression ')' statement
self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2327)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2329)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_iteration_statement2333)
e = self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2335)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_iteration_statement2337)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@@ -16511,29 +16511,29 @@ class CParser(Parser):
# C.g:573:4: 'do' statement 'while' '(' e= expression ')' ';'
self.match(self.input, 112, self.FOLLOW_112_in_iteration_statement2344)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_iteration_statement2346)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2348)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2350)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_iteration_statement2354)
e = self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2356)
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_iteration_statement2358)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@@ -16543,20 +16543,20 @@ class CParser(Parser):
# C.g:574:4: 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement
self.match(self.input, 113, self.FOLLOW_113_in_iteration_statement2365)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2367)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2369)
self.expression_statement()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2373)
e = self.expression_statement()
self.following.pop()
if self.failed:
- return
+ return
# C.g:574:58: ( expression )?
alt99 = 2
LA99_0 = self.input.LA(1)
@@ -16569,18 +16569,18 @@ class CParser(Parser):
self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2378)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_iteration_statement2380)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@@ -16596,7 +16596,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end iteration_statement
@@ -16609,7 +16609,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 71):
- return
+ return
# C.g:578:2: ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' )
alt101 = 5
@@ -16630,7 +16630,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 4, self.input)
@@ -16639,7 +16639,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 0, self.input)
@@ -16649,58 +16649,58 @@ class CParser(Parser):
# C.g:578:4: 'goto' IDENTIFIER ';'
self.match(self.input, 114, self.FOLLOW_114_in_jump_statement2393)
if self.failed:
- return
+ return
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_jump_statement2395)
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2397)
if self.failed:
- return
+ return
elif alt101 == 2:
# C.g:579:4: 'continue' ';'
self.match(self.input, 115, self.FOLLOW_115_in_jump_statement2402)
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2404)
if self.failed:
- return
+ return
elif alt101 == 3:
# C.g:580:4: 'break' ';'
self.match(self.input, 116, self.FOLLOW_116_in_jump_statement2409)
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2411)
if self.failed:
- return
+ return
elif alt101 == 4:
# C.g:581:4: 'return' ';'
self.match(self.input, 117, self.FOLLOW_117_in_jump_statement2416)
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2418)
if self.failed:
- return
+ return
elif alt101 == 5:
# C.g:582:4: 'return' expression ';'
self.match(self.input, 117, self.FOLLOW_117_in_jump_statement2423)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_jump_statement2425)
self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2427)
if self.failed:
- return
+ return
@@ -16713,7 +16713,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end jump_statement
@@ -16725,7 +16725,7 @@ class CParser(Parser):
self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred2
@@ -16856,7 +16856,7 @@ class CParser(Parser):
self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
@@ -16864,7 +16864,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
# C.g:119:41: ( declaration )*
while True: #loop103
alt103 = 2
@@ -16880,7 +16880,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -16889,7 +16889,7 @@ class CParser(Parser):
self.match(self.input, 43, self.FOLLOW_43_in_synpred4108)
if self.failed:
- return
+ return
# $ANTLR end synpred4
@@ -16904,7 +16904,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred5
@@ -16919,7 +16919,7 @@ class CParser(Parser):
self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred7
@@ -16934,7 +16934,7 @@ class CParser(Parser):
self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred10
@@ -16949,7 +16949,7 @@ class CParser(Parser):
self.type_specifier()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred14
@@ -16964,7 +16964,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred15
@@ -16979,7 +16979,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred33
@@ -16992,7 +16992,7 @@ class CParser(Parser):
# C.g:225:5: IDENTIFIER ( type_qualifier )* declarator
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred34442)
if self.failed:
- return
+ return
# C.g:225:16: ( type_qualifier )*
while True: #loop106
alt106 = 2
@@ -17027,7 +17027,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -17038,7 +17038,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred34
@@ -17053,7 +17053,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred39
@@ -17068,7 +17068,7 @@ class CParser(Parser):
self.type_specifier()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred40
@@ -17091,7 +17091,7 @@ class CParser(Parser):
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
@@ -17105,7 +17105,7 @@ class CParser(Parser):
# C.g:297:14: 'EFIAPI'
self.match(self.input, 58, self.FOLLOW_58_in_synpred66788)
if self.failed:
- return
+ return
@@ -17119,7 +17119,7 @@ class CParser(Parser):
# C.g:297:26: 'EFI_BOOTSERVICE'
self.match(self.input, 59, self.FOLLOW_59_in_synpred66793)
if self.failed:
- return
+ return
@@ -17133,7 +17133,7 @@ class CParser(Parser):
# C.g:297:47: 'EFI_RUNTIMESERVICE'
self.match(self.input, 60, self.FOLLOW_60_in_synpred66798)
if self.failed:
- return
+ return
@@ -17141,7 +17141,7 @@ class CParser(Parser):
self.direct_declarator()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred66
@@ -17156,7 +17156,7 @@ class CParser(Parser):
self.declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred67
@@ -17169,7 +17169,7 @@ class CParser(Parser):
# C.g:304:9: 'EFIAPI'
self.match(self.input, 58, self.FOLLOW_58_in_synpred69830)
if self.failed:
- return
+ return
# $ANTLR end synpred69
@@ -17184,7 +17184,7 @@ class CParser(Parser):
self.declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred70
@@ -17197,15 +17197,15 @@ class CParser(Parser):
# C.g:310:9: '(' parameter_type_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred73878)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_parameter_type_list_in_synpred73880)
self.parameter_type_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_synpred73882)
if self.failed:
- return
+ return
# $ANTLR end synpred73
@@ -17218,15 +17218,15 @@ class CParser(Parser):
# C.g:311:9: '(' identifier_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred74892)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_identifier_list_in_synpred74894)
self.identifier_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_synpred74896)
if self.failed:
- return
+ return
# $ANTLR end synpred74
@@ -17241,7 +17241,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred75
@@ -17256,7 +17256,7 @@ class CParser(Parser):
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred76
@@ -17269,7 +17269,7 @@ class CParser(Parser):
# C.g:316:4: '*' ( type_qualifier )+ ( pointer )?
self.match(self.input, 66, self.FOLLOW_66_in_synpred77919)
if self.failed:
- return
+ return
# C.g:316:8: ( type_qualifier )+
cnt116 = 0
while True: #loop116
@@ -17286,7 +17286,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -17295,7 +17295,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(116, self.input)
raise eee
@@ -17315,7 +17315,7 @@ class CParser(Parser):
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
@@ -17331,12 +17331,12 @@ class CParser(Parser):
# C.g:317:4: '*' pointer
self.match(self.input, 66, self.FOLLOW_66_in_synpred78930)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_pointer_in_synpred78932)
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred78
@@ -17349,7 +17349,7 @@ class CParser(Parser):
# C.g:326:32: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_synpred81977)
if self.failed:
- return
+ return
# $ANTLR end synpred81
@@ -17362,7 +17362,7 @@ class CParser(Parser):
# C.g:326:27: ',' ( 'OPTIONAL' )? parameter_declaration
self.match(self.input, 27, self.FOLLOW_27_in_synpred82974)
if self.failed:
- return
+ return
# C.g:326:31: ( 'OPTIONAL' )?
alt119 = 2
LA119_0 = self.input.LA(1)
@@ -17376,7 +17376,7 @@ class CParser(Parser):
# C.g:326:32: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_synpred82977)
if self.failed:
- return
+ return
@@ -17384,7 +17384,7 @@ class CParser(Parser):
self.parameter_declaration()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred82
@@ -17399,7 +17399,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred83
@@ -17414,7 +17414,7 @@ class CParser(Parser):
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred84
@@ -17429,7 +17429,7 @@ class CParser(Parser):
self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
# C.g:330:27: ( declarator | abstract_declarator )*
while True: #loop120
alt120 = 3
@@ -17513,7 +17513,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
elif alt120 == 2:
@@ -17522,7 +17522,7 @@ class CParser(Parser):
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -17539,7 +17539,7 @@ class CParser(Parser):
# C.g:330:62: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_synpred861004)
if self.failed:
- return
+ return
@@ -17557,7 +17557,7 @@ class CParser(Parser):
self.specifier_qualifier_list()
self.following.pop()
if self.failed:
- return
+ return
# C.g:341:29: ( abstract_declarator )?
alt122 = 2
LA122_0 = self.input.LA(1)
@@ -17570,7 +17570,7 @@ class CParser(Parser):
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
@@ -17588,7 +17588,7 @@ class CParser(Parser):
self.direct_abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred91
@@ -17601,15 +17601,15 @@ class CParser(Parser):
# C.g:351:6: '(' abstract_declarator ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred931086)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_abstract_declarator_in_synpred931088)
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_synpred931090)
if self.failed:
- return
+ return
# $ANTLR end synpred93
@@ -17624,7 +17624,7 @@ class CParser(Parser):
self.abstract_declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred94
@@ -17637,20 +17637,20 @@ class CParser(Parser):
# C.g:386:4: '(' type_name ')' cast_expression
self.match(self.input, 62, self.FOLLOW_62_in_synpred1091282)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_type_name_in_synpred1091284)
self.type_name()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_synpred1091286)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_cast_expression_in_synpred1091288)
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred109
@@ -17663,12 +17663,12 @@ class CParser(Parser):
# C.g:395:4: 'sizeof' unary_expression
self.match(self.input, 74, self.FOLLOW_74_in_synpred1141330)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_unary_expression_in_synpred1141332)
self.unary_expression()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred114
@@ -17681,15 +17681,15 @@ class CParser(Parser):
# C.g:409:13: '(' argument_expression_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred1171420)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_argument_expression_list_in_synpred1171424)
self.argument_expression_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_synpred1171428)
if self.failed:
- return
+ return
# $ANTLR end synpred117
@@ -17702,15 +17702,15 @@ class CParser(Parser):
# C.g:410:13: '(' macro_parameter_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred1181444)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_macro_parameter_list_in_synpred1181446)
self.macro_parameter_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_synpred1181448)
if self.failed:
- return
+ return
# $ANTLR end synpred118
@@ -17723,10 +17723,10 @@ class CParser(Parser):
# C.g:412:13: '*' IDENTIFIER
self.match(self.input, 66, self.FOLLOW_66_in_synpred1201482)
if self.failed:
- return
+ return
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1201486)
if self.failed:
- return
+ return
# $ANTLR end synpred120
@@ -17739,7 +17739,7 @@ class CParser(Parser):
# C.g:443:20: STRING_LITERAL
self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1371683)
if self.failed:
- return
+ return
# $ANTLR end synpred137
@@ -17763,7 +17763,7 @@ class CParser(Parser):
# C.g:0:0: IDENTIFIER
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1381680)
if self.failed:
- return
+ return
else:
@@ -17784,7 +17784,7 @@ class CParser(Parser):
# C.g:0:0: STRING_LITERAL
self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1381683)
if self.failed:
- return
+ return
else:
@@ -17793,7 +17793,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(126, self.input)
raise eee
@@ -17815,17 +17815,17 @@ class CParser(Parser):
self.lvalue()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_assignment_operator_in_synpred1421746)
self.assignment_operator()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_assignment_expression_in_synpred1421748)
self.assignment_expression()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred142
@@ -17840,7 +17840,7 @@ class CParser(Parser):
self.expression_statement()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred169
@@ -17855,7 +17855,7 @@ class CParser(Parser):
self.macro_statement()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred173
@@ -17870,7 +17870,7 @@ class CParser(Parser):
self.asm2_statement()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred174
@@ -17885,7 +17885,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred181
@@ -17900,7 +17900,7 @@ class CParser(Parser):
self.statement_list()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred182
@@ -17915,7 +17915,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred186
@@ -17930,7 +17930,7 @@ class CParser(Parser):
self.statement()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred188
@@ -18389,7 +18389,7 @@ class CParser(Parser):
-
+
FOLLOW_external_declaration_in_translation_unit74 = frozenset([1, 4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
FOLLOW_function_definition_in_external_declaration113 = frozenset([1])
diff --git a/BaseTools/Source/Python/Ecc/Check.py b/BaseTools/Source/Python/Ecc/Check.py
index ea739043e0..540d9cb7ed 100644
--- a/BaseTools/Source/Python/Ecc/Check.py
+++ b/BaseTools/Source/Python/Ecc/Check.py
@@ -563,17 +563,17 @@ class Check(object):
op = open(FullName).readlines()
FileLinesList = op
LineNo = 0
- CurrentSection = MODEL_UNKNOWN
+ CurrentSection = MODEL_UNKNOWN
HeaderSectionLines = []
- HeaderCommentStart = False
+ HeaderCommentStart = False
HeaderCommentEnd = False
-
+
for Line in FileLinesList:
LineNo = LineNo + 1
Line = Line.strip()
if (LineNo < len(FileLinesList) - 1):
NextLine = FileLinesList[LineNo].strip()
-
+
#
# blank line
#
@@ -600,8 +600,8 @@ class Check(object):
#
HeaderSectionLines.append((Line, LineNo))
HeaderCommentStart = True
- continue
-
+ continue
+
#
# Collect Header content.
#
@@ -635,7 +635,7 @@ class Check(object):
if EccGlobalData.gConfig.HeaderCheckFileCommentEnd == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
-
+
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
def DoxygenCheckFunctionHeader(self):
@@ -827,7 +827,7 @@ class Check(object):
for FilePath in FilePathList:
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_NAME_DUPLICATE, Record[1]):
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_NAME_DUPLICATE, OtherMsg="The Library Class [%s] is duplicated in '%s' line %s and line %s." % (Record[1], FilePath, Record[3], Record[4]), BelongsToTable='Dsc', BelongsToItem=Record[0])
-
+
# Check the header file in Include\Library directory whether be defined in the package DEC file.
def MetaDataFileCheckLibraryDefinedInDec(self):
if EccGlobalData.gConfig.MetaDataFileCheckLibraryDefinedInDec == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
@@ -842,9 +842,9 @@ class Check(object):
if not LibraryDec:
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_NOT_DEFINED, LibraryInInf):
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_NOT_DEFINED, \
- OtherMsg="The Library Class [%s] in %s line is not defined in the associated package file." % (LibraryInInf, Line),
+ OtherMsg="The Library Class [%s] in %s line is not defined in the associated package file." % (LibraryInInf, Line),
BelongsToTable='Inf', BelongsToItem=ID)
-
+
# Check whether an Inf file is specified in the FDF file, but not in the Dsc file, then the Inf file must be for a Binary module only
def MetaDataFileCheckBinaryInfInFdf(self):
if EccGlobalData.gConfig.MetaDataFileCheckBinaryInfInFdf == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
@@ -1244,7 +1244,7 @@ class Check(object):
group by A.ID
""" % (Table.Table, Table.Table, Model, Model)
RecordSet = Table.Exec(SqlCommand)
- for Record in RecordSet:
+ for Record in RecordSet:
if not EccGlobalData.gException.IsException(ErrorID, Record[2]):
EccGlobalData.gDb.TblReport.Insert(ErrorID, OtherMsg="The %s value [%s] is used more than one time" % (Name.upper(), Record[2]), BelongsToTable=Table.Table, BelongsToItem=Record[0])
diff --git a/BaseTools/Source/Python/Ecc/CodeFragment.py b/BaseTools/Source/Python/Ecc/CodeFragment.py
index 3bf1c45150..b4fe9bed96 100644
--- a/BaseTools/Source/Python/Ecc/CodeFragment.py
+++ b/BaseTools/Source/Python/Ecc/CodeFragment.py
@@ -1,7 +1,7 @@
## @file
# fragments of source file
#
-# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -161,5 +161,5 @@ class FunctionCalling:
self.FuncName = Name
self.ParamList = Param
self.StartPos = Begin
- self.EndPos = End
-
\ No newline at end of file
+ self.EndPos = End
+
diff --git a/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py b/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py
index 2efae2c7c1..b4f421342f 100644
--- a/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py
+++ b/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py
@@ -47,7 +47,7 @@ from ParserWarning import Warning
T_CHAR_BACKSLASH, T_CHAR_DOUBLE_QUOTE, T_CHAR_SINGLE_QUOTE, T_CHAR_STAR, T_CHAR_HASH) = \
(' ', '\0', '\r', '\t', '\n', '/', '\\', '\"', '\'', '*', '#')
-SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')
+SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')
(T_COMMENT_TWO_SLASH, T_COMMENT_SLASH_STAR) = (0, 1)
@@ -59,7 +59,7 @@ SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')
#
# GetNext*** procedures mean these procedures will get next token first, then make judgement.
# Get*** procedures mean these procedures will make judgement on current token only.
-#
+#
class CodeFragmentCollector:
## The constructor
#
@@ -89,7 +89,7 @@ class CodeFragmentCollector:
SizeOfLastLine = NumberOfLines
if NumberOfLines > 0:
SizeOfLastLine = len(self.Profile.FileLinesList[-1])
-
+
if self.CurrentLineNumber == NumberOfLines and self.CurrentOffsetWithinLine >= SizeOfLastLine - 1:
return True
elif self.CurrentLineNumber > NumberOfLines:
@@ -111,7 +111,7 @@ class CodeFragmentCollector:
return True
else:
return False
-
+
## Rewind() method
#
# Reset file data buffer to the initial state
@@ -121,7 +121,7 @@ class CodeFragmentCollector:
def Rewind(self):
self.CurrentLineNumber = 1
self.CurrentOffsetWithinLine = 0
-
+
## __UndoOneChar() method
#
# Go back one char in the file buffer
@@ -129,9 +129,9 @@ class CodeFragmentCollector:
# @param self The object pointer
# @retval True Successfully go back one char
# @retval False Not able to go back one char as file beginning reached
- #
+ #
def __UndoOneChar(self):
-
+
if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
return False
elif self.CurrentOffsetWithinLine == 0:
@@ -140,13 +140,13 @@ class CodeFragmentCollector:
else:
self.CurrentOffsetWithinLine -= 1
return True
-
+
## __GetOneChar() method
#
# Move forward one char in the file buffer
#
# @param self The object pointer
- #
+ #
def __GetOneChar(self):
if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
self.CurrentLineNumber += 1
@@ -160,13 +160,13 @@ class CodeFragmentCollector:
#
# @param self The object pointer
# @retval Char Current char
- #
+ #
def __CurrentChar(self):
CurrentChar = self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine]
# if CurrentChar > 255:
# raise Warning("Non-Ascii char found At Line %d, offset %d" % (self.CurrentLineNumber, self.CurrentOffsetWithinLine), self.FileName, self.CurrentLineNumber)
return CurrentChar
-
+
## __NextChar() method
#
# Get the one char pass the char pointed to by the file buffer pointer
@@ -179,7 +179,7 @@ class CodeFragmentCollector:
return self.Profile.FileLinesList[self.CurrentLineNumber][0]
else:
return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine + 1]
-
+
## __SetCurrentCharValue() method
#
# Modify the value of current char
@@ -189,7 +189,7 @@ class CodeFragmentCollector:
#
def __SetCurrentCharValue(self, Value):
self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine] = Value
-
+
## __SetCharValue() method
#
# Modify the value of current char
@@ -199,7 +199,7 @@ class CodeFragmentCollector:
#
def __SetCharValue(self, Line, Offset, Value):
self.Profile.FileLinesList[Line - 1][Offset] = Value
-
+
## __CurrentLine() method
#
# Get the list that contains current line contents
@@ -209,7 +209,7 @@ class CodeFragmentCollector:
#
def __CurrentLine(self):
return self.Profile.FileLinesList[self.CurrentLineNumber - 1]
-
+
## __InsertComma() method
#
# Insert ',' to replace PP
@@ -218,24 +218,24 @@ class CodeFragmentCollector:
# @retval List current line contents
#
def __InsertComma(self, Line):
-
-
+
+
if self.Profile.FileLinesList[Line - 1][0] != T_CHAR_HASH:
BeforeHashPart = str(self.Profile.FileLinesList[Line - 1]).split(T_CHAR_HASH)[0]
if BeforeHashPart.rstrip().endswith(T_CHAR_COMMA) or BeforeHashPart.rstrip().endswith(';'):
return
-
+
if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(','):
return
-
+
if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(';'):
return
-
+
if str(self.Profile.FileLinesList[Line]).lstrip().startswith(',') or str(self.Profile.FileLinesList[Line]).lstrip().startswith(';'):
return
-
+
self.Profile.FileLinesList[Line - 1].insert(self.CurrentOffsetWithinLine, ',')
-
+
## PreprocessFile() method
#
# Preprocess file contents, replace comments with spaces.
@@ -244,7 +244,7 @@ class CodeFragmentCollector:
# !include statement should be expanded at the same FileLinesList[CurrentLineNumber - 1]
#
# @param self The object pointer
- #
+ #
def PreprocessFile(self):
self.Rewind()
@@ -256,14 +256,14 @@ class CodeFragmentCollector:
PPDirectiveObj = None
# HashComment in quoted string " " is ignored.
InString = False
- InCharLiteral = False
-
+ InCharLiteral = False
+
self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesListFromFile]
while not self.__EndOfFile():
-
+
if not InComment and self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE:
InString = not InString
-
+
if not InComment and self.__CurrentChar() == T_CHAR_SINGLE_QUOTE:
InCharLiteral = not InCharLiteral
# meet new line, then no longer in a comment for // and '#'
@@ -274,9 +274,9 @@ class CodeFragmentCollector:
PPExtend = True
else:
PPExtend = False
-
+
EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
-
+
if InComment and DoubleSlashComment:
InComment = False
DoubleSlashComment = False
@@ -291,17 +291,17 @@ class CodeFragmentCollector:
PPDirectiveObj.EndPos = EndLinePos
FileProfile.PPDirectiveList.append(PPDirectiveObj)
PPDirectiveObj = None
-
+
if InString or InCharLiteral:
CurrentLine = "".join(self.__CurrentLine())
if CurrentLine.rstrip(T_CHAR_LF).rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
SlashIndex = CurrentLine.rindex(T_CHAR_BACKSLASH)
self.__SetCharValue(self.CurrentLineNumber, SlashIndex, T_CHAR_SPACE)
-
+
if InComment and not DoubleSlashComment and not HashComment:
CommentObj.Content += T_CHAR_LF
self.CurrentLineNumber += 1
- self.CurrentOffsetWithinLine = 0
+ self.CurrentOffsetWithinLine = 0
# check for */ comment end
elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
CommentObj.Content += self.__CurrentChar()
@@ -315,7 +315,7 @@ class CodeFragmentCollector:
self.__GetOneChar()
InComment = False
# set comments to spaces
- elif InComment:
+ elif InComment:
if HashComment:
# // follows hash PP directive
if self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
@@ -341,7 +341,7 @@ class CodeFragmentCollector:
# check for '#' comment
elif self.__CurrentChar() == T_CHAR_HASH and not InString and not InCharLiteral:
InComment = True
- HashComment = True
+ HashComment = True
PPDirectiveObj = PP_Directive('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None)
# check for /* comment start
elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
@@ -355,9 +355,9 @@ class CodeFragmentCollector:
InComment = True
else:
self.__GetOneChar()
-
+
EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
-
+
if InComment and DoubleSlashComment:
CommentObj.EndPos = EndLinePos
FileProfile.CommentList.append(CommentObj)
@@ -378,14 +378,14 @@ class CodeFragmentCollector:
PPDirectiveObj = None
# HashComment in quoted string " " is ignored.
InString = False
- InCharLiteral = False
+ InCharLiteral = False
self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesListFromFile]
while not self.__EndOfFile():
-
+
if not InComment and self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE:
InString = not InString
-
+
if not InComment and self.__CurrentChar() == T_CHAR_SINGLE_QUOTE:
InCharLiteral = not InCharLiteral
# meet new line, then no longer in a comment for // and '#'
@@ -396,9 +396,9 @@ class CodeFragmentCollector:
PPExtend = True
else:
PPExtend = False
-
+
EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
-
+
if InComment and DoubleSlashComment:
InComment = False
DoubleSlashComment = False
@@ -413,17 +413,17 @@ class CodeFragmentCollector:
PPDirectiveObj.EndPos = EndLinePos
FileProfile.PPDirectiveList.append(PPDirectiveObj)
PPDirectiveObj = None
-
+
if InString or InCharLiteral:
CurrentLine = "".join(self.__CurrentLine())
if CurrentLine.rstrip(T_CHAR_LF).rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
SlashIndex = CurrentLine.rindex(T_CHAR_BACKSLASH)
self.__SetCharValue(self.CurrentLineNumber, SlashIndex, T_CHAR_SPACE)
-
+
if InComment and not DoubleSlashComment and not HashComment:
CommentObj.Content += T_CHAR_LF
self.CurrentLineNumber += 1
- self.CurrentOffsetWithinLine = 0
+ self.CurrentOffsetWithinLine = 0
# check for */ comment end
elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
CommentObj.Content += self.__CurrentChar()
@@ -437,7 +437,7 @@ class CodeFragmentCollector:
self.__GetOneChar()
InComment = False
# set comments to spaces
- elif InComment:
+ elif InComment:
if HashComment:
# // follows hash PP directive
if self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
@@ -463,7 +463,7 @@ class CodeFragmentCollector:
# check for '#' comment
elif self.__CurrentChar() == T_CHAR_HASH and not InString and not InCharLiteral:
InComment = True
- HashComment = True
+ HashComment = True
PPDirectiveObj = PP_Directive('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None)
# check for /* comment start
elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
@@ -479,7 +479,7 @@ class CodeFragmentCollector:
self.__GetOneChar()
EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
-
+
if InComment and DoubleSlashComment:
CommentObj.EndPos = EndLinePos
FileProfile.CommentList.append(CommentObj)
@@ -507,7 +507,7 @@ class CodeFragmentCollector:
tStream = antlr3.CommonTokenStream(lexer)
parser = CParser(tStream)
parser.translation_unit()
-
+
def ParseFileWithClearedPPDirective(self):
self.PreprocessFileWithClear()
# restore from ListOfList to ListOfString
@@ -520,7 +520,7 @@ class CodeFragmentCollector:
tStream = antlr3.CommonTokenStream(lexer)
parser = CParser(tStream)
parser.translation_unit()
-
+
def CleanFileProfileBuffer(self):
FileProfile.CommentList = []
FileProfile.PPDirectiveList = []
@@ -531,61 +531,61 @@ class CodeFragmentCollector:
FileProfile.StructUnionDefinitionList = []
FileProfile.TypedefDefinitionList = []
FileProfile.FunctionCallingList = []
-
+
def PrintFragments(self):
-
+
print('################# ' + self.FileName + '#####################')
-
+
print('/****************************************/')
print('/*************** COMMENTS ***************/')
print('/****************************************/')
for comment in FileProfile.CommentList:
print(str(comment.StartPos) + comment.Content)
-
+
print('/****************************************/')
print('/********* PREPROCESS DIRECTIVES ********/')
print('/****************************************/')
for pp in FileProfile.PPDirectiveList:
print(str(pp.StartPos) + pp.Content)
-
+
print('/****************************************/')
print('/********* VARIABLE DECLARATIONS ********/')
print('/****************************************/')
for var in FileProfile.VariableDeclarationList:
print(str(var.StartPos) + var.Modifier + ' '+ var.Declarator)
-
+
print('/****************************************/')
print('/********* FUNCTION DEFINITIONS *********/')
print('/****************************************/')
for func in FileProfile.FunctionDefinitionList:
print(str(func.StartPos) + func.Modifier + ' '+ func.Declarator + ' ' + str(func.NamePos))
-
+
print('/****************************************/')
print('/************ ENUMERATIONS **************/')
print('/****************************************/')
for enum in FileProfile.EnumerationDefinitionList:
print(str(enum.StartPos) + enum.Content)
-
+
print('/****************************************/')
print('/*********** STRUCTS/UNIONS *************/')
print('/****************************************/')
for su in FileProfile.StructUnionDefinitionList:
print(str(su.StartPos) + su.Content)
-
+
print('/****************************************/')
print('/********* PREDICATE EXPRESSIONS ********/')
print('/****************************************/')
for predexp in FileProfile.PredicateExpressionList:
print(str(predexp.StartPos) + predexp.Content)
-
+
print('/****************************************/')
print('/************** TYPEDEFS ****************/')
print('/****************************************/')
for typedef in FileProfile.TypedefDefinitionList:
print(str(typedef.StartPos) + typedef.ToType)
-
+
if __name__ == "__main__":
-
+
collector = CodeFragmentCollector(sys.argv[1])
collector.PreprocessFile()
print("For Test.")
diff --git a/BaseTools/Source/Python/Ecc/Configuration.py b/BaseTools/Source/Python/Ecc/Configuration.py
index 4711bbd54f..29a1220761 100644
--- a/BaseTools/Source/Python/Ecc/Configuration.py
+++ b/BaseTools/Source/Python/Ecc/Configuration.py
@@ -215,7 +215,7 @@ class Configuration(object):
self.HeaderCheckCFileCommentReferenceFormat = 1
# Check whether C File header Comment have the License immediately after the ""Copyright"" line
self.HeaderCheckCFileCommentLicenseFormat = 1
-
+
## C Function Layout Checking
self.CFunctionLayoutCheckAll = 0
@@ -352,7 +352,7 @@ class Configuration(object):
self.MetaDataFileCheckModuleFilePpiFormat = 1
# Check Pcd Format in INF files
self.MetaDataFileCheckModuleFilePcdFormat = 1
-
+
# Check UNI file
self.UniCheckAll = 0
# Check INF or DEC file whether defined the localized information in the associated UNI file.
@@ -374,16 +374,16 @@ class Configuration(object):
# The directory listed here will not be parsed, split with ','
self.SkipDirList = []
-
+
# The file listed here will not be parsed, split with ','
self.SkipFileList = []
# A list for binary file ext name
self.BinaryExtList = []
-
+
# A list for only scanned folders
self.ScanOnlyDirList = []
-
+
# A list for Copyright format
self.Copyright = []
diff --git a/BaseTools/Source/Python/Ecc/Ecc.py b/BaseTools/Source/Python/Ecc/Ecc.py
index e78d70372e..ccd563a466 100644
--- a/BaseTools/Source/Python/Ecc/Ecc.py
+++ b/BaseTools/Source/Python/Ecc/Ecc.py
@@ -1,7 +1,7 @@
## @file
# This file is used to be the main entrance of ECC tool
#
-# Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -51,7 +51,7 @@ class Ecc(object):
# Version and Copyright
self.VersionNumber = ("1.0" + " Build " + gBUILD_VERSION)
self.Version = "%prog Version " + self.VersionNumber
- self.Copyright = "Copyright (c) 2009 - 2016, Intel Corporation All rights reserved."
+ self.Copyright = "Copyright (c) 2009 - 2018, Intel Corporation All rights reserved."
self.InitDefaultConfigIni()
self.OutputFile = 'output.txt'
@@ -66,17 +66,17 @@ class Ecc(object):
# Parse the options and args
self.ParseOption()
EdkLogger.info(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")
-
+
#
# Check EFI_SOURCE (Edk build convention). EDK_SOURCE will always point to ECP
#
WorkspaceDir = os.path.normcase(os.path.normpath(os.environ["WORKSPACE"]))
os.environ["WORKSPACE"] = WorkspaceDir
-
+
# set multiple workspace
PackagesPath = os.getenv("PACKAGES_PATH")
mws.setWs(WorkspaceDir, PackagesPath)
-
+
if "ECP_SOURCE" not in os.environ:
os.environ["ECP_SOURCE"] = mws.join(WorkspaceDir, GlobalData.gEdkCompatibilityPkg)
if "EFI_SOURCE" not in os.environ:
@@ -90,11 +90,11 @@ class Ecc(object):
EfiSourceDir = os.path.normcase(os.path.normpath(os.environ["EFI_SOURCE"]))
EdkSourceDir = os.path.normcase(os.path.normpath(os.environ["EDK_SOURCE"]))
EcpSourceDir = os.path.normcase(os.path.normpath(os.environ["ECP_SOURCE"]))
-
+
os.environ["EFI_SOURCE"] = EfiSourceDir
os.environ["EDK_SOURCE"] = EdkSourceDir
os.environ["ECP_SOURCE"] = EcpSourceDir
-
+
GlobalData.gWorkspace = WorkspaceDir
GlobalData.gEfiSource = EfiSourceDir
GlobalData.gEdkSource = EdkSourceDir
@@ -104,7 +104,7 @@ class Ecc(object):
GlobalData.gGlobalDefines["EFI_SOURCE"] = EfiSourceDir
GlobalData.gGlobalDefines["EDK_SOURCE"] = EdkSourceDir
GlobalData.gGlobalDefines["ECP_SOURCE"] = EcpSourceDir
-
+
EdkLogger.info("Loading ECC configuration ... done")
# Generate checkpoints list
EccGlobalData.gConfig = Configuration(self.ConfigFile)
@@ -120,11 +120,11 @@ class Ecc(object):
# Get files real name in workspace dir
#
GlobalData.gAllFiles = DirCache(GlobalData.gWorkspace)
-
+
# Build ECC database
# self.BuildDatabase()
self.DetectOnlyScanDirs()
-
+
# Start to check
self.Check()
@@ -160,8 +160,8 @@ class Ecc(object):
EdkLogger.error("ECC", BuildToolError.OPTION_VALUE_INVALID, ExtraData="Use -f option need to fill specific folders in config.ini file")
else:
self.BuildDatabase()
-
-
+
+
## BuildDatabase
#
# Build the database for target
@@ -172,7 +172,7 @@ class Ecc(object):
EccGlobalData.gDb.TblReport.Create()
# Build database
- if self.IsInit:
+ if self.IsInit:
if self.ScanMetaData:
EdkLogger.quiet("Building database for Meta Data File ...")
self.BuildMetaDataFileDatabase(SpeciDirs)
@@ -198,7 +198,7 @@ class Ecc(object):
if SpecificDirs is None:
ScanFolders.append(EccGlobalData.gTarget)
else:
- for specificDir in SpecificDirs:
+ for specificDir in SpecificDirs:
ScanFolders.append(os.path.join(EccGlobalData.gTarget, specificDir))
EdkLogger.quiet("Building database for meta data files ...")
Op = open(EccGlobalData.gConfig.MetaDataFileCheckPathOfGenerateFileList, 'w+')
@@ -219,7 +219,7 @@ class Ecc(object):
# symlinks to directories are treated as directories
Dirs.remove(Dir)
Dirs.append(Dirname)
-
+
for File in Files:
if len(File) > 4 and File[-4:].upper() == ".DEC":
Filename = os.path.normpath(os.path.join(Root, File))
diff --git a/BaseTools/Source/Python/Ecc/EccGlobalData.py b/BaseTools/Source/Python/Ecc/EccGlobalData.py
index c0b00a4265..79d0de680c 100644
--- a/BaseTools/Source/Python/Ecc/EccGlobalData.py
+++ b/BaseTools/Source/Python/Ecc/EccGlobalData.py
@@ -1,7 +1,7 @@
## @file
# This file is used to save global datas used by ECC tool
#
-# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -24,4 +24,4 @@ gIdentifierTableList = []
gCFileList = []
gHFileList = []
gUFileList = []
-gException = None
\ No newline at end of file
+gException = None
diff --git a/BaseTools/Source/Python/Ecc/Exception.py b/BaseTools/Source/Python/Ecc/Exception.py
index bde41c3a4b..0498a503d7 100644
--- a/BaseTools/Source/Python/Ecc/Exception.py
+++ b/BaseTools/Source/Python/Ecc/Exception.py
@@ -1,7 +1,7 @@
## @file
# This file is used to parse exception items found by ECC tool
#
-# Copyright (c) 2009 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -24,12 +24,12 @@ class ExceptionXml(object):
self.KeyWord = ''
self.ErrorID = ''
self.FilePath = ''
-
+
def FromXml(self, Item, Key):
self.KeyWord = XmlElement(Item, '%s/KeyWord' % Key)
self.ErrorID = XmlElement(Item, '%s/ErrorID' % Key)
self.FilePath = os.path.normpath(XmlElement(Item, '%s/FilePath' % Key))
-
+
def __str__(self):
return 'ErrorID = %s KeyWord = %s FilePath = %s' %(self.ErrorID, self.KeyWord, self.FilePath)
@@ -37,22 +37,22 @@ class ExceptionXml(object):
class ExceptionListXml(object):
def __init__(self):
self.List = []
-
+
def FromXmlFile(self, FilePath):
XmlContent = XmlParseFile(FilePath)
for Item in XmlList(XmlContent, '/ExceptionList/Exception'):
Exp = ExceptionXml()
Exp.FromXml(Item, 'Exception')
self.List.append(Exp)
-
+
def ToList(self):
RtnList = []
for Item in self.List:
#RtnList.append((Item.ErrorID, Item.KeyWord, Item.FilePath))
RtnList.append((Item.ErrorID, Item.KeyWord))
-
+
return RtnList
-
+
def __str__(self):
RtnStr = ''
if self.List:
@@ -71,7 +71,7 @@ class ExceptionCheck(object):
if FilePath and os.path.isfile(FilePath):
self.ExceptionListXml.FromXmlFile(FilePath)
self.ExceptionList = self.ExceptionListXml.ToList()
-
+
def IsException(self, ErrorID, KeyWord, FileID=-1):
if (str(ErrorID), KeyWord.replace('\r\n', '\n')) in self.ExceptionList:
return True
diff --git a/BaseTools/Source/Python/Ecc/FileProfile.py b/BaseTools/Source/Python/Ecc/FileProfile.py
index f31d37ff96..a071fd020e 100644
--- a/BaseTools/Source/Python/Ecc/FileProfile.py
+++ b/BaseTools/Source/Python/Ecc/FileProfile.py
@@ -1,7 +1,7 @@
## @file
# fragments of source file
#
-# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -36,7 +36,7 @@ FunctionCallingList = []
# May raise Exception when opening file.
#
class FileProfile :
-
+
## The constructor
#
# @param self The object pointer
@@ -54,5 +54,5 @@ class FileProfile :
except IOError:
raise Warning("Error when opening file %s" % FileName)
-
-
\ No newline at end of file
+
+
diff --git a/BaseTools/Source/Python/Ecc/MetaDataParser.py b/BaseTools/Source/Python/Ecc/MetaDataParser.py
index 82ede3eb33..6060d67581 100644
--- a/BaseTools/Source/Python/Ecc/MetaDataParser.py
+++ b/BaseTools/Source/Python/Ecc/MetaDataParser.py
@@ -1,7 +1,7 @@
## @file
# This file is used to define common parser functions for meta-data
#
-# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -87,16 +87,16 @@ def GetTableList(FileModelList, Table, Db):
# @param FileName: FileName of the comment
#
def ParseHeaderCommentSection(CommentList, FileName = None):
-
+
Abstract = ''
Description = ''
Copyright = ''
License = ''
EndOfLine = "\n"
STR_HEADER_COMMENT_START = "@file"
-
+
#
- # used to indicate the state of processing header comment section of dec,
+ # used to indicate the state of processing header comment section of dec,
# inf files
#
HEADER_COMMENT_NOT_STARTED = -1
@@ -117,11 +117,11 @@ def ParseHeaderCommentSection(CommentList, FileName = None):
if _IsCopyrightLine(Line):
Last = Index
break
-
+
for Item in CommentList:
Line = Item[0]
LineNo = Item[1]
-
+
if not Line.startswith('#') and Line:
SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
@@ -131,14 +131,14 @@ def ParseHeaderCommentSection(CommentList, FileName = None):
Comment = CleanString2(Line)[1]
Comment = Comment.strip()
#
- # if there are blank lines between License or Description, keep them as they would be
+ # if there are blank lines between License or Description, keep them as they would be
# indication of different block; or in the position that Abstract should be, also keep it
# as it indicates that no abstract
#
if not Comment and HeaderCommentStage not in [HEADER_COMMENT_LICENSE, \
HEADER_COMMENT_DESCRIPTION, HEADER_COMMENT_ABSTRACT]:
continue
-
+
if HeaderCommentStage == HEADER_COMMENT_NOT_STARTED:
if Comment.startswith(STR_HEADER_COMMENT_START):
HeaderCommentStage = HEADER_COMMENT_ABSTRACT
@@ -152,39 +152,39 @@ def ParseHeaderCommentSection(CommentList, FileName = None):
if not Comment:
Abstract = ''
HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
- elif _IsCopyrightLine(Comment):
+ elif _IsCopyrightLine(Comment):
Copyright += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
- else:
+ else:
Abstract += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
elif HeaderCommentStage == HEADER_COMMENT_DESCRIPTION:
#
# in case there is no description
- #
- if _IsCopyrightLine(Comment):
+ #
+ if _IsCopyrightLine(Comment):
Copyright += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
else:
- Description += Comment + EndOfLine
+ Description += Comment + EndOfLine
elif HeaderCommentStage == HEADER_COMMENT_COPYRIGHT:
- if _IsCopyrightLine(Comment):
+ if _IsCopyrightLine(Comment):
Copyright += Comment + EndOfLine
else:
#
# Contents after copyright line are license, those non-copyright lines in between
- # copyright line will be discarded
+ # copyright line will be discarded
#
if LineNo > Last:
if License:
License += EndOfLine
License += Comment + EndOfLine
- HeaderCommentStage = HEADER_COMMENT_LICENSE
+ HeaderCommentStage = HEADER_COMMENT_LICENSE
else:
if not Comment and not License:
continue
License += Comment + EndOfLine
-
+
if not Copyright.strip():
SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
@@ -198,19 +198,19 @@ def ParseHeaderCommentSection(CommentList, FileName = None):
for Result in ResultSet:
Msg = 'Header comment section must have license information'
EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
-
+
if not Abstract.strip() or Abstract.find('Component description file') > -1:
SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
for Result in ResultSet:
Msg = 'Header comment section must have Abstract information.'
EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
-
+
return Abstract.strip(), Description.strip(), Copyright.strip(), License.strip()
## _IsCopyrightLine
-# check whether current line is copyright line, the criteria is whether there is case insensitive keyword "Copyright"
-# followed by zero or more white space characters followed by a "(" character
+# check whether current line is copyright line, the criteria is whether there is case insensitive keyword "Copyright"
+# followed by zero or more white space characters followed by a "(" character
#
# @param LineContent: the line need to be checked
# @return: True if current line is copyright line, False else
@@ -218,11 +218,11 @@ def ParseHeaderCommentSection(CommentList, FileName = None):
def _IsCopyrightLine (LineContent):
LineContent = LineContent.upper()
Result = False
-
+
ReIsCopyrightRe = re.compile(r"""(^|\s)COPYRIGHT *\(""", re.DOTALL)
if ReIsCopyrightRe.search(LineContent):
Result = True
-
+
return Result
@@ -232,7 +232,7 @@ def _IsCopyrightLine (LineContent):
# Remove spaces
#
# @param Line: The string to be cleaned
-# @param CommentCharacter: Comment char, used to ignore comment content,
+# @param CommentCharacter: Comment char, used to ignore comment content,
# default is DataType.TAB_COMMENT_SPLIT
#
def CleanString2(Line, CommentCharacter='#', AllowCppStyleComment=False):
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
index a41223f285..111a25ed92 100644
--- a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
+++ b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
@@ -92,7 +92,7 @@ def ParseMacro(Parser):
elif (Name in self._FileLocalMacros) and (self._FileLocalMacros[Name] != Value):
EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL defined a macro with the same name and different value as one defined by 'DEFINE'",
ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
-
+
self._ValueList = [Type, Name, Value]
return MacroParser
@@ -334,7 +334,7 @@ class MetaFileParser(object):
self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
Name, Value = self._ValueList[1], self._ValueList[2]
- # Sometimes, we need to make differences between EDK and EDK2 modules
+ # Sometimes, we need to make differences between EDK and EDK2 modules
if Name == 'INF_VERSION':
try:
self._Version = int(Value, 0)
@@ -354,7 +354,7 @@ class MetaFileParser(object):
UniFile = os.path.join(os.path.dirname(self.MetaFile), Value)
if os.path.exists(UniFile):
self._UniObj = UniParser(UniFile, IsExtraUni=False, IsModuleUni=False)
-
+
if isinstance(self, InfParser) and self._Version < 0x00010005:
# EDK module allows using defines as macros
self._FileLocalMacros[Name] = Value
@@ -390,7 +390,7 @@ class MetaFileParser(object):
return Macros
- ## Get section Macros that are applicable to current line, which may come from other sections
+ ## Get section Macros that are applicable to current line, which may come from other sections
## that share the same name while scope is wider
def _GetApplicableSectionMacro(self):
Macros = {}
@@ -473,7 +473,7 @@ class InfParser(MetaFileParser):
self.FileID = FileID
else:
self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_INF)
-
+
# parse the file line by line
IsFindBlockComment = False
@@ -591,7 +591,7 @@ class InfParser(MetaFileParser):
)
Usage = ''
if IsFindBlockComment:
- EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
+ EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
File=self.MetaFile)
self._Done()
@@ -818,7 +818,7 @@ class DscParser(MetaFileParser):
# the owner item
#
self._IdMapping = {-1:-1}
-
+
self.TblFile = EccGlobalData.gDb.TblFile
self.FileID = -1
@@ -838,8 +838,8 @@ class DscParser(MetaFileParser):
self.FileID = FileID
else:
self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DSC)
-
-
+
+
for Index in range(0, len(Content)):
Line = CleanString(Content[Index])
# skip empty line
@@ -850,7 +850,7 @@ class DscParser(MetaFileParser):
self._LineIndex = Index
if self._InSubsection and self._Owner[-1] == -1:
self._Owner.append(self._LastItem)
-
+
# section header
if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
self._SectionType = MODEL_META_DATA_SECTION_HEADER
@@ -960,7 +960,7 @@ class DscParser(MetaFileParser):
elif self._From > 0:
EdkLogger.error('Parser', FORMAT_INVALID,
"No '!include' allowed in included file",
- ExtraData=self._CurrentLine, File=self.MetaFile,
+ ExtraData=self._CurrentLine, File=self.MetaFile,
Line=self._LineIndex+1)
#
@@ -1154,7 +1154,7 @@ class DscParser(MetaFileParser):
MODEL_META_DATA_USER_EXTENSION : self._Skip,
MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR : self._Skip,
}
-
+
self._RawTable = self._Table
self._Table = MetaFileStorage(self._RawTable.Cur, self.MetaFile, MODEL_FILE_DSC, True)
self._DirectiveStack = []
@@ -1184,7 +1184,7 @@ class DscParser(MetaFileParser):
try:
Processer[self._ItemType]()
except EvaluationException as Excpt:
- #
+ #
# Only catch expression evaluation error here. We need to report
# the precise number of line on which the error occurred
#
@@ -1194,11 +1194,11 @@ class DscParser(MetaFileParser):
# Line=self._LineIndex+1)
except MacroException as Excpt:
EdkLogger.error('Parser', FORMAT_INVALID, str(Excpt),
- File=self._FileWithError, ExtraData=' '.join(self._ValueList),
+ File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex+1)
if self._ValueList is None:
- continue
+ continue
NewOwner = self._IdMapping.get(Owner, -1)
self._Enabled = int((not self._DirectiveEvalStack) or (False not in self._DirectiveEvalStack))
@@ -1221,7 +1221,7 @@ class DscParser(MetaFileParser):
self._IdMapping[Id] = self._LastItem
RecordList = self._Table.GetAll()
-
+
self._RawTable.Drop()
self._Table.Drop()
for Record in RecordList:
@@ -1255,7 +1255,7 @@ class DscParser(MetaFileParser):
# Don't use PCD with different values.
if Name in self._Symbols and self._Symbols[Name] != Value:
self._Symbols.pop(Name)
- continue
+ continue
self._Symbols[Name] = Value
Records = self._RawTable.Query(MODEL_PCD_FIXED_AT_BUILD, BelongsToItem=-1.0)
@@ -1263,12 +1263,12 @@ class DscParser(MetaFileParser):
Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
# Only use PCD whose value is straitforward (no macro and PCD)
if self.SymbolPattern.findall(Value):
- continue
+ continue
Name = TokenSpaceGuid+'.'+PcdName
# Don't use PCD with different values.
if Name in self._Symbols and self._Symbols[Name] != Value:
self._Symbols.pop(Name)
- continue
+ continue
self._Symbols[Name] = Value
def __ProcessDefine(self):
@@ -1288,13 +1288,13 @@ class DscParser(MetaFileParser):
SectionLocalMacros[Name] = Value
elif self._ItemType == MODEL_META_DATA_GLOBAL_DEFINE:
GlobalData.gEdkGlobal[Name] = Value
-
+
#
# Keyword in [Defines] section can be used as Macros
#
if (self._ItemType == MODEL_META_DATA_HEADER) and (self._SectionType == MODEL_META_DATA_HEADER):
self._FileLocalMacros[Name] = Value
-
+
self._ValueList = [Type, Name, Value]
def __ProcessDirective(self):
@@ -1309,12 +1309,12 @@ class DscParser(MetaFileParser):
EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc), self._ValueList[1])
Result = False
except WrnExpression as Excpt:
- #
+ #
# Catch expression evaluation warning here. We need to report
# the precise number of line and return the evaluation result
#
EdkLogger.warn('Parser', "Suspicious expression: %s" % str(Excpt),
- File=self._FileWithError, ExtraData=' '.join(self._ValueList),
+ File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex+1)
Result = Excpt.result
except BadExpression as Exc:
@@ -1365,14 +1365,14 @@ class DscParser(MetaFileParser):
#
elif "ECP_SOURCE" in GlobalData.gCommandLineDefines.keys():
__IncludeMacros['ECP_SOURCE'] = GlobalData.gCommandLineDefines['ECP_SOURCE']
-
+
__IncludeMacros['EFI_SOURCE'] = GlobalData.gGlobalDefines['EFI_SOURCE']
__IncludeMacros['EDK_SOURCE'] = GlobalData.gGlobalDefines['EDK_SOURCE']
#
- # Allow using MACROs comes from [Defines] section to keep compatible.
+ # Allow using MACROs comes from [Defines] section to keep compatible.
#
__IncludeMacros.update(self._Macros)
-
+
IncludedFile = NormPath(ReplaceMacro(self._ValueList[1], __IncludeMacros, RaiseError=True))
#
# First search the include file under the same directory as DSC file
@@ -1386,14 +1386,14 @@ class DscParser(MetaFileParser):
IncludedFile1 = PathClass(IncludedFile, GlobalData.gWorkspace)
ErrorCode, ErrorInfo2 = IncludedFile1.Validate()
if ErrorCode != 0:
- EdkLogger.error('parser', ErrorCode, File=self._FileWithError,
+ EdkLogger.error('parser', ErrorCode, File=self._FileWithError,
Line=self._LineIndex+1, ExtraData=ErrorInfo1 + "\n"+ ErrorInfo2)
self._FileWithError = IncludedFile1
IncludedFileTable = MetaFileStorage(self._Table.Cur, IncludedFile1, MODEL_FILE_DSC, True)
Owner = self._Content[self._ContentIndex-1][0]
- Parser = DscParser(IncludedFile1, self._FileType, IncludedFileTable,
+ Parser = DscParser(IncludedFile1, self._FileType, IncludedFileTable,
Owner=Owner, From=Owner)
# set the parser status with current status
@@ -1417,7 +1417,7 @@ class DscParser(MetaFileParser):
self._Content.pop(self._ContentIndex-1)
self._ValueList = None
self._ContentIndex -= 1
-
+
def __ProcessSkuId(self):
self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=True)
for Value in self._ValueList]
@@ -1434,22 +1434,22 @@ class DscParser(MetaFileParser):
# PCD value can be an expression
#
if len(ValueList) > 1 and ValueList[1] == TAB_VOID:
- PcdValue = ValueList[0]
+ PcdValue = ValueList[0]
try:
ValueList[0] = ValueExpression(PcdValue, self._Macros)(True)
except WrnExpression as Value:
- ValueList[0] = Value.result
+ ValueList[0] = Value.result
else:
PcdValue = ValueList[-1]
try:
ValueList[-1] = ValueExpression(PcdValue, self._Macros)(True)
except WrnExpression as Value:
ValueList[-1] = Value.result
-
+
if ValueList[-1] == 'True':
ValueList[-1] = '1'
if ValueList[-1] == 'False':
- ValueList[-1] = '0'
+ ValueList[-1] = '0'
self._ValueList[2] = '|'.join(ValueList)
@@ -1548,7 +1548,7 @@ class DecParser(MetaFileParser):
self.FileID = FileID
else:
self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DEC)
-
+
for Index in range(0, len(Content)):
Line, Comment = CleanString2(Content[Index])
self._CurrentLine = Line
@@ -1750,19 +1750,19 @@ class DecParser(MetaFileParser):
" (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
File=self.MetaFile, Line=self._LineIndex+1)
-
+
ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
PtrValue = ValueRe.findall(TokenList[1])
-
- # Has VOID* type string, may contain "|" character in the string.
+
+ # Has VOID* type string, may contain "|" character in the string.
if len(PtrValue) != 0:
ptrValueList = re.sub(ValueRe, '', TokenList[1])
ValueList = GetSplitValueList(ptrValueList)
ValueList[0] = PtrValue[0]
else:
ValueList = GetSplitValueList(TokenList[1])
-
-
+
+
# check if there's enough datum information given
if len(ValueList) != 3:
EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given",
@@ -1792,7 +1792,7 @@ class DecParser(MetaFileParser):
if not IsValid:
EdkLogger.error('Parser', FORMAT_INVALID, Cause, ExtraData=self._CurrentLine,
File=self.MetaFile, Line=self._LineIndex+1)
-
+
if EccGlobalData.gConfig.UniCheckPCDInfo == '1' or EccGlobalData.gConfig.UniCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
# check Description, Prompt information
PatternDesc = re.compile('##\s*([\x21-\x7E\s]*)', re.S)
@@ -1903,7 +1903,7 @@ class DecParser(MetaFileParser):
## Fdf
#
# This class defined the structure used in Fdf object
-#
+#
# @param Filename: Input value for Ffilename of Fdf file, default is None
# @param WorkspaceDir: Input value for current workspace directory, default is None
#
@@ -1911,7 +1911,7 @@ class Fdf(object):
def __init__(self, Filename = None, IsToDatabase = False, WorkspaceDir = None, Database = None):
self.WorkspaceDir = WorkspaceDir
self.IsToDatabase = IsToDatabase
-
+
self.Cur = Database.Cur
self.TblFile = Database.TblFile
self.TblFdf = Database.TblFdf
@@ -1938,15 +1938,15 @@ class Fdf(object):
self.FileList[Filename] = FileID
return self.FileList[Filename]
-
-
+
+
## Load Fdf file
#
# Load the file if it exists
#
# @param Filename: Input value for filename of Fdf file
#
- def LoadFdfFile(self, Filename):
+ def LoadFdfFile(self, Filename):
FileList = []
#
# Parse Fdf file
@@ -1991,7 +1991,7 @@ class UniParser(object):
self.FileIn = None
self.Missing = []
self.__read()
-
+
def __read(self):
try:
self.FileIn = CodecOpenLongFilePath(self.FilePath, Mode='rb', Encoding='utf_8').read()
@@ -2001,7 +2001,7 @@ class UniParser(object):
self.FileIn = CodecOpenLongFilePath(self.FilePath, Mode='rb', Encoding='utf_16_le').read()
except IOError:
self.FileIn = ""
-
+
def Start(self):
if self.IsModuleUni:
if self.IsExtraUni:
@@ -2021,7 +2021,7 @@ class UniParser(object):
self.PrintLog('STR_PACKAGE_ABSTRACT', PackageAbstract)
PackageDescription = self.CheckKeyValid('STR_PACKAGE_DESCRIPTION')
self.PrintLog('STR_PACKAGE_DESCRIPTION', PackageDescription)
-
+
def CheckKeyValid(self, Key, Contents=None):
if not Contents:
Contents = self.FileIn
@@ -2029,7 +2029,7 @@ class UniParser(object):
if KeyPattern.search(Contents):
return True
return False
-
+
def CheckPcdInfo(self, PcdCName):
PromptKey = 'STR_%s_PROMPT' % PcdCName.replace('.', '_')
PcdPrompt = self.CheckKeyValid(PromptKey)
@@ -2037,7 +2037,7 @@ class UniParser(object):
HelpKey = 'STR_%s_HELP' % PcdCName.replace('.', '_')
PcdHelp = self.CheckKeyValid(HelpKey)
self.PrintLog(HelpKey, PcdHelp)
-
+
def PrintLog(self, Key, Value):
if not Value and Key not in self.Missing:
Msg = '%s is missing in the %s file.' % (Key, self.FileName)
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py
index 9faa6b58b0..ad3cf36a1c 100644
--- a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py
+++ b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase a meta file table
#
-# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -25,7 +25,7 @@ from CommonDataClass.DataClass import MODEL_FILE_DSC, MODEL_FILE_DEC, MODEL_FILE
MODEL_FILE_OTHERS
class MetaFileTable(Table):
- ## Constructor
+ ## Constructor
def __init__(self, Cursor, MetaFile, FileType, TableName, Temporary = False):
self.MetaFile = MetaFile
self.TblFile = EccGlobalData.gDb.TblFile
@@ -88,30 +88,30 @@ class ModuleTable(MetaFileTable):
BelongsToItem=-1, BelongsToFile = -1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0, Usage=''):
(Value1, Value2, Value3, Usage, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Usage, Scope1, Scope2))
return Table.Insert(
- self,
- Model,
- Value1,
- Value2,
- Value3,
- Usage,
- Scope1,
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Usage,
+ Scope1,
Scope2,
BelongsToItem,
- BelongsToFile,
- StartLine,
- StartColumn,
- EndLine,
- EndColumn,
+ BelongsToFile,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
Enabled
)
## Query table
#
- # @param Model: The Model of Record
- # @param Arch: The Arch attribute of Record
- # @param Platform The Platform attribute of Record
+ # @param Model: The Model of Record
+ # @param Arch: The Arch attribute of Record
+ # @param Platform The Platform attribute of Record
#
- # @retval: A recordSet of all found records
+ # @retval: A recordSet of all found records
#
def Query(self, Model, Arch=None, Platform=None):
ConditionString = "Model=%s AND Enabled>=0" % Model
@@ -171,28 +171,28 @@ class PackageTable(MetaFileTable):
BelongsToItem=-1, BelongsToFile = -1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
(Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
return Table.Insert(
- self,
- Model,
- Value1,
- Value2,
- Value3,
- Scope1,
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
Scope2,
BelongsToItem,
- BelongsToFile,
- StartLine,
- StartColumn,
- EndLine,
- EndColumn,
+ BelongsToFile,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
Enabled
)
## Query table
#
- # @param Model: The Model of Record
- # @param Arch: The Arch attribute of Record
+ # @param Model: The Model of Record
+ # @param Arch: The Arch attribute of Record
#
- # @retval: A recordSet of all found records
+ # @retval: A recordSet of all found records
#
def Query(self, Model, Arch=None):
ConditionString = "Model=%s AND Enabled>=0" % Model
@@ -252,32 +252,32 @@ class PlatformTable(MetaFileTable):
FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):
(Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
return Table.Insert(
- self,
- Model,
- Value1,
- Value2,
- Value3,
- Scope1,
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
Scope2,
- BelongsToItem,
+ BelongsToItem,
BelongsToFile,
FromItem,
- StartLine,
- StartColumn,
- EndLine,
- EndColumn,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
Enabled
)
## Query table
#
- # @param Model: The Model of Record
+ # @param Model: The Model of Record
# @param Scope1: Arch of a Dsc item
# @param Scope2: Module type of a Dsc item
# @param BelongsToItem: The item belongs to which another item
# @param FromItem: The item belongs to which dsc file
#
- # @retval: A recordSet of all found records
+ # @retval: A recordSet of all found records
#
def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None):
ConditionString = "Model=%s AND Enabled>0" % Model
diff --git a/BaseTools/Source/Python/Ecc/ParserWarning.py b/BaseTools/Source/Python/Ecc/ParserWarning.py
index a52c78ec31..0e62ba2bb7 100644
--- a/BaseTools/Source/Python/Ecc/ParserWarning.py
+++ b/BaseTools/Source/Python/Ecc/ParserWarning.py
@@ -1,7 +1,7 @@
## @file
# This file is used to be the warning class of ECC tool
#
-# Copyright (c) 2009 - 2010, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -27,4 +27,4 @@ class Warning (Exception):
self.message = Str
self.FileName = File
self.LineNumber = Line
- self.ToolName = 'ECC PP'
\ No newline at end of file
+ self.ToolName = 'ECC PP'
diff --git a/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py b/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py
index 1e45806fa6..4294016ae3 100644
--- a/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py
+++ b/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py
@@ -2,7 +2,7 @@
# This is an XML API that uses a syntax similar to XPath, but it is written in
# standard python so that no extra python packages are required to use it.
#
-# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -33,7 +33,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
Element = Doc.createElement(Name)
if String != '' and String is not None:
Element.appendChild(Doc.createTextNode(String))
-
+
for Item in NodeList:
if isinstance(Item, type([])):
Key = Item[0]
@@ -49,7 +49,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
Value = Item[1]
if Key != '' and Key is not None and Value != '' and Value is not None:
Element.setAttribute(Key, Value)
-
+
return Element
## Get a list of XML nodes using XPath style syntax.
diff --git a/BaseTools/Source/Python/Ecc/Xml/__init__.py b/BaseTools/Source/Python/Ecc/Xml/__init__.py
index f09eece5fb..e8283c03cb 100644
--- a/BaseTools/Source/Python/Ecc/Xml/__init__.py
+++ b/BaseTools/Source/Python/Ecc/Xml/__init__.py
@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
Xml
-'''
\ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/Ecc/c.py b/BaseTools/Source/Python/Ecc/c.py
index e2a5cc8487..12dbf664ba 100644
--- a/BaseTools/Source/Python/Ecc/c.py
+++ b/BaseTools/Source/Python/Ecc/c.py
@@ -2349,13 +2349,13 @@ def CheckFileHeaderDoxygenComments(FullFileName):
if (len(CommentStrListTemp) <= 1):
# For Mac
CommentStrListTemp = CommentStr.split('\r')
- # Skip the content before the file header
+ # Skip the content before the file header
for CommentLine in CommentStrListTemp:
if CommentLine.strip().startswith('/** @file'):
FileStartFlag = True
if FileStartFlag == True:
CommentStrList.append(CommentLine)
-
+
ID = Result[1]
Index = 0
if CommentStrList and CommentStrList[0].strip().startswith('/** @file'):
@@ -2378,7 +2378,7 @@ def CheckFileHeaderDoxygenComments(FullFileName):
if EccGlobalData.gConfig.HeaderCheckCFileCommentStartSpacesNum == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
if CommentLine.startswith('/** @file') == False and CommentLine.startswith('**/') == False and CommentLine.strip() and CommentLine.startswith(' ') == False:
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment content should start with two spaces at each line', FileTable, ID)
-
+
CommentLine = CommentLine.strip()
if CommentLine.startswith('Copyright'):
NoCopyrightFlag = False
@@ -2403,9 +2403,9 @@ def CheckFileHeaderDoxygenComments(FullFileName):
# Check whether C File header Comment's each reference at list should begin with a bullet character.
if EccGlobalData.gConfig.HeaderCheckCFileCommentReferenceFormat == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
if RefListFlag == True:
- if RefLine.strip() and RefLine.strip().startswith('**/') == False and RefLine.startswith(' -') == False:
- PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'Each reference on a separate line should begin with a bullet character ""-"" ', FileTable, ID)
-
+ if RefLine.strip() and RefLine.strip().startswith('**/') == False and RefLine.startswith(' -') == False:
+ PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'Each reference on a separate line should begin with a bullet character ""-"" ', FileTable, ID)
+
if NoHeaderCommentStartFlag:
PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, ID)
return
diff --git a/BaseTools/Source/Python/Eot/CLexer.py b/BaseTools/Source/Python/Eot/CLexer.py
index a496f43440..b9e57c1b55 100644
--- a/BaseTools/Source/Python/Eot/CLexer.py
+++ b/BaseTools/Source/Python/Eot/CLexer.py
@@ -2,7 +2,7 @@
from antlr3 import *
from antlr3.compat import set, frozenset
-
+
## @file
# The file defines the Lexer for C source files.
#
@@ -10,7 +10,7 @@ from antlr3.compat import set, frozenset
# This file is generated by running:
# java org.antlr.Tool C.g
#
-# Copyright (c) 2009 - 2010, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
@@ -4341,7 +4341,7 @@ class CLexer(Lexer):
u"\12\uffff"
)
-
+
DFA25_transition = [
DFA.unpack(u"\1\2\1\uffff\12\1"),
DFA.unpack(u"\1\3\1\uffff\12\1\12\uffff\1\5\1\4\1\5\35\uffff\1\5"
@@ -4479,7 +4479,7 @@ class CLexer(Lexer):
u"\u0192\uffff"
)
-
+
DFA35_transition = [
DFA.unpack(u"\6\73\2\70\1\73\2\70\22\73\1\70\1\50\1\65\1\72\1\63"
u"\1\45\1\46\1\64\1\34\1\35\1\40\1\42\1\3\1\43\1\41\1\44\1\66\11"
@@ -4943,5 +4943,5 @@ class CLexer(Lexer):
# class definition for DFA #35
DFA35 = DFA
-
+
diff --git a/BaseTools/Source/Python/Eot/CParser.py b/BaseTools/Source/Python/Eot/CParser.py
index d7eff138da..b66ac2d8d5 100644
--- a/BaseTools/Source/Python/Eot/CParser.py
+++ b/BaseTools/Source/Python/Eot/CParser.py
@@ -3,7 +3,7 @@
from __future__ import print_function
from antlr3 import *
from antlr3.compat import set, frozenset
-
+
## @file
# The file defines the parser for C source files.
#
@@ -11,7 +11,7 @@ from antlr3.compat import set, frozenset
# This file is generated by running:
# java org.antlr.Tool C.g
#
-# Copyright (c) 2009 - 2010, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
@@ -57,23 +57,23 @@ OctalEscape=17
# token names
tokenNames = [
- "<invalid>", "<EOR>", "<DOWN>", "<UP>",
- "IDENTIFIER", "HEX_LITERAL", "OCTAL_LITERAL", "DECIMAL_LITERAL", "CHARACTER_LITERAL",
- "STRING_LITERAL", "FLOATING_POINT_LITERAL", "LETTER", "EscapeSequence",
- "HexDigit", "IntegerTypeSuffix", "Exponent", "FloatTypeSuffix", "OctalEscape",
- "UnicodeEscape", "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
- "LINE_COMMAND", "';'", "'typedef'", "','", "'='", "'extern'", "'static'",
- "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'", "'int'",
- "'long'", "'float'", "'double'", "'signed'", "'unsigned'", "'{'", "'}'",
- "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'", "'IN'",
- "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'",
- "'EFIAPI'", "'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'PACKED'",
- "'('", "')'", "'['", "']'", "'*'", "'...'", "'+'", "'-'", "'/'", "'%'",
- "'++'", "'--'", "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='",
- "'/='", "'%='", "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
- "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'", "'>'", "'<='",
- "'>='", "'<<'", "'>>'", "'__asm__'", "'_asm'", "'__asm'", "'case'",
- "'default'", "'if'", "'else'", "'switch'", "'while'", "'do'", "'for'",
+ "<invalid>", "<EOR>", "<DOWN>", "<UP>",
+ "IDENTIFIER", "HEX_LITERAL", "OCTAL_LITERAL", "DECIMAL_LITERAL", "CHARACTER_LITERAL",
+ "STRING_LITERAL", "FLOATING_POINT_LITERAL", "LETTER", "EscapeSequence",
+ "HexDigit", "IntegerTypeSuffix", "Exponent", "FloatTypeSuffix", "OctalEscape",
+ "UnicodeEscape", "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
+ "LINE_COMMAND", "';'", "'typedef'", "','", "'='", "'extern'", "'static'",
+ "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'", "'int'",
+ "'long'", "'float'", "'double'", "'signed'", "'unsigned'", "'{'", "'}'",
+ "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'", "'IN'",
+ "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'",
+ "'EFIAPI'", "'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'PACKED'",
+ "'('", "')'", "'['", "']'", "'*'", "'...'", "'+'", "'-'", "'/'", "'%'",
+ "'++'", "'--'", "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='",
+ "'/='", "'%='", "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
+ "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'", "'>'", "'<='",
+ "'>='", "'<<'", "'>>'", "'__asm__'", "'_asm'", "'__asm'", "'case'",
+ "'default'", "'if'", "'else'", "'switch'", "'while'", "'do'", "'for'",
"'goto'", "'continue'", "'break'", "'return'"
]
@@ -106,33 +106,33 @@ class CParser(Parser):
print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
- PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
- FileProfile.PredicateExpressionList.append(PredExp)
-
+ PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.PredicateExpressionList.append(PredExp)
+
def StoreEnumerationDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
- EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
- FileProfile.EnumerationDefinitionList.append(EnumDef)
-
+ EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.EnumerationDefinitionList.append(EnumDef)
+
def StoreStructUnionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
- SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
- FileProfile.StructUnionDefinitionList.append(SUDef)
-
+ SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.StructUnionDefinitionList.append(SUDef)
+
def StoreTypedefDefinition(self, StartLine, StartOffset, EndLine, EndOffset, FromText, ToText):
- Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
- FileProfile.TypedefDefinitionList.append(Tdef)
-
+ Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.TypedefDefinitionList.append(Tdef)
+
def StoreFunctionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText, LeftBraceLine, LeftBraceOffset, DeclLine, DeclOffset):
- FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
- FileProfile.FunctionDefinitionList.append(FuncDef)
-
+ FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
+ FileProfile.FunctionDefinitionList.append(FuncDef)
+
def StoreVariableDeclaration(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText):
- VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
- FileProfile.VariableDeclarationList.append(VarDecl)
-
+ VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.VariableDeclarationList.append(VarDecl)
+
def StoreFunctionCalling(self, StartLine, StartOffset, EndLine, EndOffset, FuncName, ParamList):
- FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
- FileProfile.FunctionCallingList.append(FuncCall)
-
+ FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.FunctionCallingList.append(FuncCall)
+
@@ -144,7 +144,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 1):
- return
+ return
# C.g:103:2: ( ( external_declaration )* )
# C.g:103:4: ( external_declaration )*
@@ -163,7 +163,7 @@ class CParser(Parser):
self.external_declaration()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -183,7 +183,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end translation_unit
@@ -196,7 +196,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 2):
- return
+ return
# C.g:119:2: ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? )
alt3 = 3
@@ -212,7 +212,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 1, self.input)
@@ -228,7 +228,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 2, self.input)
@@ -244,7 +244,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 3, self.input)
@@ -260,7 +260,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 4, self.input)
@@ -276,7 +276,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 5, self.input)
@@ -292,7 +292,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 6, self.input)
@@ -308,7 +308,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 7, self.input)
@@ -324,7 +324,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 8, self.input)
@@ -340,7 +340,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 9, self.input)
@@ -356,7 +356,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 10, self.input)
@@ -372,7 +372,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 11, self.input)
@@ -388,7 +388,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 12, self.input)
@@ -406,7 +406,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 13, self.input)
@@ -422,7 +422,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 14, self.input)
@@ -440,7 +440,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 16, self.input)
@@ -456,7 +456,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 17, self.input)
@@ -472,7 +472,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 18, self.input)
@@ -485,7 +485,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 0, self.input)
@@ -497,7 +497,7 @@ class CParser(Parser):
self.function_definition()
self.following.pop()
if self.failed:
- return
+ return
elif alt3 == 2:
@@ -506,7 +506,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
elif alt3 == 3:
@@ -515,7 +515,7 @@ class CParser(Parser):
self.macro_statement()
self.following.pop()
if self.failed:
- return
+ return
# C.g:121:20: ( ';' )?
alt2 = 2
LA2_0 = self.input.LA(1)
@@ -526,7 +526,7 @@ class CParser(Parser):
# C.g:121:21: ';'
self.match(self.input, 25, self.FOLLOW_25_in_external_declaration126)
if self.failed:
- return
+ return
@@ -542,7 +542,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end external_declaration
@@ -569,7 +569,7 @@ class CParser(Parser):
declarator1 = None
-
+
self.function_definition_stack[-1].ModifierText = ''
self.function_definition_stack[-1].DeclText = ''
self.function_definition_stack[-1].LBLine = 0
@@ -783,7 +783,7 @@ class CParser(Parser):
if self.backtracking == 0:
-
+
if d is not None:
self.function_definition_stack[-1].ModifierText = self.input.toString(d.start, d.stop)
else:
@@ -797,7 +797,7 @@ class CParser(Parser):
else:
self.function_definition_stack[-1].LBLine = b.start.line
self.function_definition_stack[-1].LBOffset = b.start.charPositionInLine
-
+
@@ -805,7 +805,7 @@ class CParser(Parser):
retval.stop = self.input.LT(-1)
if self.backtracking == 0:
-
+
self.StoreFunctionDefinition(retval.start.line, retval.start.charPositionInLine, retval.stop.line, retval.stop.charPositionInLine, self.function_definition_stack[-1].ModifierText, self.function_definition_stack[-1].DeclText, self.function_definition_stack[-1].LBLine, self.function_definition_stack[-1].LBOffset, self.function_definition_stack[-1].DeclLine, self.function_definition_stack[-1].DeclOffset)
@@ -845,7 +845,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 4):
- return
+ return
# C.g:167:2: (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' )
alt9 = 2
@@ -858,7 +858,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("166:1: declaration : (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' );", 9, 0, self.input)
@@ -869,7 +869,7 @@ class CParser(Parser):
a = self.input.LT(1)
self.match(self.input, 26, self.FOLLOW_26_in_declaration203)
if self.failed:
- return
+ return
# C.g:167:17: (b= declaration_specifiers )?
alt7 = 2
LA7 = self.input.LA(1)
@@ -906,7 +906,7 @@ class CParser(Parser):
b = self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
@@ -914,18 +914,18 @@ class CParser(Parser):
c = self.init_declarator_list()
self.following.pop()
if self.failed:
- return
+ return
d = self.input.LT(1)
self.match(self.input, 25, self.FOLLOW_25_in_declaration220)
if self.failed:
- return
+ return
if self.backtracking == 0:
-
+
if b is not None:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start, b.stop), self.input.toString(c.start, c.stop))
else:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start, c.stop))
-
+
@@ -935,7 +935,7 @@ class CParser(Parser):
s = self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
# C.g:175:30: (t= init_declarator_list )?
alt8 = 2
LA8_0 = self.input.LA(1)
@@ -948,19 +948,19 @@ class CParser(Parser):
t = self.init_declarator_list()
self.following.pop()
if self.failed:
- return
+ return
e = self.input.LT(1)
self.match(self.input, 25, self.FOLLOW_25_in_declaration243)
if self.failed:
- return
+ return
if self.backtracking == 0:
-
+
if t is not None:
self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start, s.stop), self.input.toString(t.start, t.stop))
-
+
@@ -974,7 +974,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end declaration
@@ -1185,7 +1185,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 7):
- return
+ return
# C.g:194:2: ( declarator ( '=' initializer )? )
# C.g:194:4: declarator ( '=' initializer )?
@@ -1193,7 +1193,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
# C.g:194:15: ( '=' initializer )?
alt12 = 2
LA12_0 = self.input.LA(1)
@@ -1204,12 +1204,12 @@ class CParser(Parser):
# C.g:194:16: '=' initializer
self.match(self.input, 28, self.FOLLOW_28_in_init_declarator329)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_initializer_in_init_declarator331)
self.initializer()
self.following.pop()
if self.failed:
- return
+ return
@@ -1226,7 +1226,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end init_declarator
@@ -1239,7 +1239,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 8):
- return
+ return
# C.g:198:2: ( 'extern' | 'static' | 'auto' | 'register' | 'STATIC' )
# C.g:
@@ -1251,7 +1251,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -1273,7 +1273,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end storage_class_specifier
@@ -1291,7 +1291,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 9):
- return
+ return
# C.g:206:2: ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id )
alt13 = 12
@@ -1324,7 +1324,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("205:1: type_specifier : ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id );", 13, 0, self.input)
@@ -1334,63 +1334,63 @@ class CParser(Parser):
# C.g:206:4: 'void'
self.match(self.input, 34, self.FOLLOW_34_in_type_specifier376)
if self.failed:
- return
+ return
elif alt13 == 2:
# C.g:207:4: 'char'
self.match(self.input, 35, self.FOLLOW_35_in_type_specifier381)
if self.failed:
- return
+ return
elif alt13 == 3:
# C.g:208:4: 'short'
self.match(self.input, 36, self.FOLLOW_36_in_type_specifier386)
if self.failed:
- return
+ return
elif alt13 == 4:
# C.g:209:4: 'int'
self.match(self.input, 37, self.FOLLOW_37_in_type_specifier391)
if self.failed:
- return
+ return
elif alt13 == 5:
# C.g:210:4: 'long'
self.match(self.input, 38, self.FOLLOW_38_in_type_specifier396)
if self.failed:
- return
+ return
elif alt13 == 6:
# C.g:211:4: 'float'
self.match(self.input, 39, self.FOLLOW_39_in_type_specifier401)
if self.failed:
- return
+ return
elif alt13 == 7:
# C.g:212:4: 'double'
self.match(self.input, 40, self.FOLLOW_40_in_type_specifier406)
if self.failed:
- return
+ return
elif alt13 == 8:
# C.g:213:4: 'signed'
self.match(self.input, 41, self.FOLLOW_41_in_type_specifier411)
if self.failed:
- return
+ return
elif alt13 == 9:
# C.g:214:4: 'unsigned'
self.match(self.input, 42, self.FOLLOW_42_in_type_specifier416)
if self.failed:
- return
+ return
elif alt13 == 10:
@@ -1399,12 +1399,12 @@ class CParser(Parser):
s = self.struct_or_union_specifier()
self.following.pop()
if self.failed:
- return
+ return
if self.backtracking == 0:
-
+
if s.stop is not None:
self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start, s.stop))
-
+
@@ -1414,12 +1414,12 @@ class CParser(Parser):
e = self.enum_specifier()
self.following.pop()
if self.failed:
- return
+ return
if self.backtracking == 0:
-
+
if e.stop is not None:
self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
-
+
@@ -1429,7 +1429,7 @@ class CParser(Parser):
self.type_id()
self.following.pop()
if self.failed:
- return
+ return
@@ -1442,7 +1442,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end type_specifier
@@ -1455,13 +1455,13 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 10):
- return
+ return
# C.g:229:5: ( IDENTIFIER )
# C.g:229:9: IDENTIFIER
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_type_id467)
if self.failed:
- return
+ return
@@ -1475,7 +1475,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end type_id
@@ -1612,7 +1612,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 12):
- return
+ return
# C.g:240:2: ( 'struct' | 'union' )
# C.g:
@@ -1624,7 +1624,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -1646,7 +1646,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end struct_or_union
@@ -1659,7 +1659,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 13):
- return
+ return
# C.g:245:2: ( ( struct_declaration )+ )
# C.g:245:4: ( struct_declaration )+
@@ -1679,7 +1679,7 @@ class CParser(Parser):
self.struct_declaration()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -1688,7 +1688,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(16, self.input)
raise eee
@@ -1709,7 +1709,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end struct_declaration_list
@@ -1722,7 +1722,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 14):
- return
+ return
# C.g:249:2: ( specifier_qualifier_list struct_declarator_list ';' )
# C.g:249:4: specifier_qualifier_list struct_declarator_list ';'
@@ -1730,15 +1730,15 @@ class CParser(Parser):
self.specifier_qualifier_list()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_struct_declarator_list_in_struct_declaration551)
self.struct_declarator_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_struct_declaration553)
if self.failed:
- return
+ return
@@ -1752,7 +1752,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end struct_declaration
@@ -1765,7 +1765,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 15):
- return
+ return
# C.g:253:2: ( ( type_qualifier | type_specifier )+ )
# C.g:253:4: ( type_qualifier | type_specifier )+
@@ -1832,7 +1832,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
elif alt17 == 2:
@@ -1841,7 +1841,7 @@ class CParser(Parser):
self.type_specifier()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -1850,7 +1850,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(17, self.input)
raise eee
@@ -1871,7 +1871,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end specifier_qualifier_list
@@ -1884,7 +1884,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 16):
- return
+ return
# C.g:257:2: ( struct_declarator ( ',' struct_declarator )* )
# C.g:257:4: struct_declarator ( ',' struct_declarator )*
@@ -1892,7 +1892,7 @@ class CParser(Parser):
self.struct_declarator()
self.following.pop()
if self.failed:
- return
+ return
# C.g:257:22: ( ',' struct_declarator )*
while True: #loop18
alt18 = 2
@@ -1906,12 +1906,12 @@ class CParser(Parser):
# C.g:257:23: ',' struct_declarator
self.match(self.input, 27, self.FOLLOW_27_in_struct_declarator_list587)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_struct_declarator_in_struct_declarator_list589)
self.struct_declarator()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -1931,7 +1931,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end struct_declarator_list
@@ -1944,7 +1944,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 17):
- return
+ return
# C.g:261:2: ( declarator ( ':' constant_expression )? | ':' constant_expression )
alt20 = 2
@@ -1957,7 +1957,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("260:1: struct_declarator : ( declarator ( ':' constant_expression )? | ':' constant_expression );", 20, 0, self.input)
@@ -1969,7 +1969,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
# C.g:261:15: ( ':' constant_expression )?
alt19 = 2
LA19_0 = self.input.LA(1)
@@ -1980,12 +1980,12 @@ class CParser(Parser):
# C.g:261:16: ':' constant_expression
self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator605)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_constant_expression_in_struct_declarator607)
self.constant_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -1995,12 +1995,12 @@ class CParser(Parser):
# C.g:262:4: ':' constant_expression
self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator614)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_constant_expression_in_struct_declarator616)
self.constant_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -2013,7 +2013,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end struct_declarator
@@ -2181,7 +2181,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 19):
- return
+ return
# C.g:273:2: ( enumerator ( ',' enumerator )* )
# C.g:273:4: enumerator ( ',' enumerator )*
@@ -2189,7 +2189,7 @@ class CParser(Parser):
self.enumerator()
self.following.pop()
if self.failed:
- return
+ return
# C.g:273:15: ( ',' enumerator )*
while True: #loop24
alt24 = 2
@@ -2208,12 +2208,12 @@ class CParser(Parser):
# C.g:273:16: ',' enumerator
self.match(self.input, 27, self.FOLLOW_27_in_enumerator_list680)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_enumerator_in_enumerator_list682)
self.enumerator()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -2233,7 +2233,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end enumerator_list
@@ -2246,13 +2246,13 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 20):
- return
+ return
# C.g:277:2: ( IDENTIFIER ( '=' constant_expression )? )
# C.g:277:4: IDENTIFIER ( '=' constant_expression )?
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enumerator695)
if self.failed:
- return
+ return
# C.g:277:15: ( '=' constant_expression )?
alt25 = 2
LA25_0 = self.input.LA(1)
@@ -2263,12 +2263,12 @@ class CParser(Parser):
# C.g:277:16: '=' constant_expression
self.match(self.input, 28, self.FOLLOW_28_in_enumerator698)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_constant_expression_in_enumerator700)
self.constant_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -2285,7 +2285,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end enumerator
@@ -2298,7 +2298,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 21):
- return
+ return
# C.g:281:2: ( 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' | 'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' | 'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' | 'PACKED' )
# C.g:
@@ -2310,7 +2310,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -2332,7 +2332,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end type_qualifier
@@ -2487,7 +2487,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 23):
- return
+ return
# C.g:303:2: ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ )
alt34 = 2
@@ -2500,7 +2500,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("302:1: direct_declarator : ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ );", 34, 0, self.input)
@@ -2510,7 +2510,7 @@ class CParser(Parser):
# C.g:303:4: IDENTIFIER ( declarator_suffix )*
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_direct_declarator819)
if self.failed:
- return
+ return
# C.g:303:15: ( declarator_suffix )*
while True: #loop31
alt31 = 2
@@ -2754,7 +2754,7 @@ class CParser(Parser):
self.declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -2767,7 +2767,7 @@ class CParser(Parser):
# C.g:304:4: '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+
self.match(self.input, 62, self.FOLLOW_62_in_direct_declarator827)
if self.failed:
- return
+ return
# C.g:304:8: ( 'EFIAPI' )?
alt32 = 2
LA32_0 = self.input.LA(1)
@@ -2781,7 +2781,7 @@ class CParser(Parser):
# C.g:304:9: 'EFIAPI'
self.match(self.input, 58, self.FOLLOW_58_in_direct_declarator830)
if self.failed:
- return
+ return
@@ -2789,10 +2789,10 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_direct_declarator836)
if self.failed:
- return
+ return
# C.g:304:35: ( declarator_suffix )+
cnt33 = 0
while True: #loop33
@@ -3037,7 +3037,7 @@ class CParser(Parser):
self.declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -3046,7 +3046,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(33, self.input)
raise eee
@@ -3066,7 +3066,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end direct_declarator
@@ -3079,7 +3079,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 24):
- return
+ return
# C.g:308:2: ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' )
alt35 = 5
@@ -3095,7 +3095,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 1, self.input)
@@ -3117,7 +3117,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 29, self.input)
@@ -3126,7 +3126,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 2, self.input)
@@ -3135,7 +3135,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 0, self.input)
@@ -3145,65 +3145,65 @@ class CParser(Parser):
# C.g:308:6: '[' constant_expression ']'
self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix852)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_constant_expression_in_declarator_suffix854)
self.constant_expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 65, self.FOLLOW_65_in_declarator_suffix856)
if self.failed:
- return
+ return
elif alt35 == 2:
# C.g:309:9: '[' ']'
self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix866)
if self.failed:
- return
+ return
self.match(self.input, 65, self.FOLLOW_65_in_declarator_suffix868)
if self.failed:
- return
+ return
elif alt35 == 3:
# C.g:310:9: '(' parameter_type_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix878)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_parameter_type_list_in_declarator_suffix880)
self.parameter_type_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix882)
if self.failed:
- return
+ return
elif alt35 == 4:
# C.g:311:9: '(' identifier_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix892)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_identifier_list_in_declarator_suffix894)
self.identifier_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix896)
if self.failed:
- return
+ return
elif alt35 == 5:
# C.g:312:9: '(' ')'
self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix906)
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix908)
if self.failed:
- return
+ return
@@ -3216,7 +3216,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end declarator_suffix
@@ -3229,7 +3229,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 25):
- return
+ return
# C.g:316:2: ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' )
alt38 = 3
@@ -3247,7 +3247,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 2, self.input)
@@ -3263,7 +3263,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 3, self.input)
@@ -3279,7 +3279,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 4, self.input)
@@ -3295,7 +3295,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 5, self.input)
@@ -3313,7 +3313,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 21, self.input)
@@ -3329,7 +3329,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 29, self.input)
@@ -3338,7 +3338,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 1, self.input)
@@ -3347,7 +3347,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 0, self.input)
@@ -3357,7 +3357,7 @@ class CParser(Parser):
# C.g:316:4: '*' ( type_qualifier )+ ( pointer )?
self.match(self.input, 66, self.FOLLOW_66_in_pointer919)
if self.failed:
- return
+ return
# C.g:316:8: ( type_qualifier )+
cnt36 = 0
while True: #loop36
@@ -3405,7 +3405,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -3414,7 +3414,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(36, self.input)
raise eee
@@ -3437,7 +3437,7 @@ class CParser(Parser):
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
@@ -3447,19 +3447,19 @@ class CParser(Parser):
# C.g:317:4: '*' pointer
self.match(self.input, 66, self.FOLLOW_66_in_pointer930)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_pointer_in_pointer932)
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
elif alt38 == 3:
# C.g:318:4: '*'
self.match(self.input, 66, self.FOLLOW_66_in_pointer937)
if self.failed:
- return
+ return
@@ -3472,7 +3472,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end pointer
@@ -3485,7 +3485,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 26):
- return
+ return
# C.g:322:2: ( parameter_list ( ',' ( 'OPTIONAL' )? '...' )? )
# C.g:322:4: parameter_list ( ',' ( 'OPTIONAL' )? '...' )?
@@ -3493,7 +3493,7 @@ class CParser(Parser):
self.parameter_list()
self.following.pop()
if self.failed:
- return
+ return
# C.g:322:19: ( ',' ( 'OPTIONAL' )? '...' )?
alt40 = 2
LA40_0 = self.input.LA(1)
@@ -3504,7 +3504,7 @@ class CParser(Parser):
# C.g:322:20: ',' ( 'OPTIONAL' )? '...'
self.match(self.input, 27, self.FOLLOW_27_in_parameter_type_list951)
if self.failed:
- return
+ return
# C.g:322:24: ( 'OPTIONAL' )?
alt39 = 2
LA39_0 = self.input.LA(1)
@@ -3515,13 +3515,13 @@ class CParser(Parser):
# C.g:322:25: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_parameter_type_list954)
if self.failed:
- return
+ return
self.match(self.input, 67, self.FOLLOW_67_in_parameter_type_list958)
if self.failed:
- return
+ return
@@ -3538,7 +3538,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end parameter_type_list
@@ -3551,7 +3551,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 27):
- return
+ return
# C.g:326:2: ( parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* )
# C.g:326:4: parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )*
@@ -3559,7 +3559,7 @@ class CParser(Parser):
self.parameter_declaration()
self.following.pop()
if self.failed:
- return
+ return
# C.g:326:26: ( ',' ( 'OPTIONAL' )? parameter_declaration )*
while True: #loop42
alt42 = 2
@@ -3585,7 +3585,7 @@ class CParser(Parser):
# C.g:326:27: ',' ( 'OPTIONAL' )? parameter_declaration
self.match(self.input, 27, self.FOLLOW_27_in_parameter_list974)
if self.failed:
- return
+ return
# C.g:326:31: ( 'OPTIONAL' )?
alt41 = 2
LA41_0 = self.input.LA(1)
@@ -3599,7 +3599,7 @@ class CParser(Parser):
# C.g:326:32: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_parameter_list977)
if self.failed:
- return
+ return
@@ -3607,7 +3607,7 @@ class CParser(Parser):
self.parameter_declaration()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -3627,7 +3627,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end parameter_list
@@ -3640,7 +3640,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 28):
- return
+ return
# C.g:330:2: ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER )
alt46 = 2
@@ -3657,7 +3657,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 13, self.input)
@@ -3668,7 +3668,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 0, self.input)
@@ -3680,7 +3680,7 @@ class CParser(Parser):
self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
# C.g:330:27: ( declarator | abstract_declarator )*
while True: #loop43
alt43 = 3
@@ -3764,7 +3764,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
elif alt43 == 2:
@@ -3773,7 +3773,7 @@ class CParser(Parser):
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -3790,7 +3790,7 @@ class CParser(Parser):
# C.g:330:62: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_parameter_declaration1004)
if self.failed:
- return
+ return
@@ -3813,7 +3813,7 @@ class CParser(Parser):
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -3822,7 +3822,7 @@ class CParser(Parser):
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_parameter_declaration1016)
if self.failed:
- return
+ return
@@ -3835,7 +3835,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end parameter_declaration
@@ -3848,13 +3848,13 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 29):
- return
+ return
# C.g:336:2: ( IDENTIFIER ( ',' IDENTIFIER )* )
# C.g:336:4: IDENTIFIER ( ',' IDENTIFIER )*
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1027)
if self.failed:
- return
+ return
# C.g:337:2: ( ',' IDENTIFIER )*
while True: #loop47
alt47 = 2
@@ -3868,10 +3868,10 @@ class CParser(Parser):
# C.g:337:3: ',' IDENTIFIER
self.match(self.input, 27, self.FOLLOW_27_in_identifier_list1031)
if self.failed:
- return
+ return
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1033)
if self.failed:
- return
+ return
else:
@@ -3891,7 +3891,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end identifier_list
@@ -3904,7 +3904,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 30):
- return
+ return
# C.g:341:2: ( specifier_qualifier_list ( abstract_declarator )? | type_id )
alt49 = 2
@@ -3922,7 +3922,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 13, self.input)
@@ -3931,7 +3931,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 0, self.input)
@@ -3943,7 +3943,7 @@ class CParser(Parser):
self.specifier_qualifier_list()
self.following.pop()
if self.failed:
- return
+ return
# C.g:341:29: ( abstract_declarator )?
alt48 = 2
LA48_0 = self.input.LA(1)
@@ -3956,7 +3956,7 @@ class CParser(Parser):
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
@@ -3968,7 +3968,7 @@ class CParser(Parser):
self.type_id()
self.following.pop()
if self.failed:
- return
+ return
@@ -3981,7 +3981,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end type_name
@@ -3994,7 +3994,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 31):
- return
+ return
# C.g:346:2: ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator )
alt51 = 2
@@ -4007,7 +4007,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("345:1: abstract_declarator : ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator );", 51, 0, self.input)
@@ -4019,7 +4019,7 @@ class CParser(Parser):
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
# C.g:346:12: ( direct_abstract_declarator )?
alt50 = 2
LA50_0 = self.input.LA(1)
@@ -4204,7 +4204,7 @@ class CParser(Parser):
self.direct_abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
@@ -4216,7 +4216,7 @@ class CParser(Parser):
self.direct_abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
@@ -4229,7 +4229,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end abstract_declarator
@@ -4242,7 +4242,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 32):
- return
+ return
# C.g:351:2: ( ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* )
# C.g:351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )*
@@ -4264,7 +4264,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 18, self.input)
@@ -4275,7 +4275,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 1, self.input)
@@ -4286,7 +4286,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 0, self.input)
@@ -4296,15 +4296,15 @@ class CParser(Parser):
# C.g:351:6: '(' abstract_declarator ')'
self.match(self.input, 62, self.FOLLOW_62_in_direct_abstract_declarator1086)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_abstract_declarator_in_direct_abstract_declarator1088)
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_direct_abstract_declarator1090)
if self.failed:
- return
+ return
elif alt52 == 2:
@@ -4313,7 +4313,7 @@ class CParser(Parser):
self.abstract_declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
@@ -4560,7 +4560,7 @@ class CParser(Parser):
self.abstract_declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -4580,7 +4580,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end direct_abstract_declarator
@@ -4593,7 +4593,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 33):
- return
+ return
# C.g:355:2: ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' )
alt54 = 4
@@ -4609,7 +4609,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 1, self.input)
@@ -4625,7 +4625,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 2, self.input)
@@ -4634,7 +4634,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 0, self.input)
@@ -4644,50 +4644,50 @@ class CParser(Parser):
# C.g:355:4: '[' ']'
self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1110)
if self.failed:
- return
+ return
self.match(self.input, 65, self.FOLLOW_65_in_abstract_declarator_suffix1112)
if self.failed:
- return
+ return
elif alt54 == 2:
# C.g:356:4: '[' constant_expression ']'
self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1117)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_constant_expression_in_abstract_declarator_suffix1119)
self.constant_expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 65, self.FOLLOW_65_in_abstract_declarator_suffix1121)
if self.failed:
- return
+ return
elif alt54 == 3:
# C.g:357:4: '(' ')'
self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1126)
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1128)
if self.failed:
- return
+ return
elif alt54 == 4:
# C.g:358:4: '(' parameter_type_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1133)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_parameter_type_list_in_abstract_declarator_suffix1135)
self.parameter_type_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1137)
if self.failed:
- return
+ return
@@ -4700,7 +4700,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end abstract_declarator_suffix
@@ -4713,7 +4713,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 34):
- return
+ return
# C.g:363:2: ( assignment_expression | '{' initializer_list ( ',' )? '}' )
alt56 = 2
@@ -4726,7 +4726,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("361:1: initializer : ( assignment_expression | '{' initializer_list ( ',' )? '}' );", 56, 0, self.input)
@@ -4738,19 +4738,19 @@ class CParser(Parser):
self.assignment_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt56 == 2:
# C.g:364:4: '{' initializer_list ( ',' )? '}'
self.match(self.input, 43, self.FOLLOW_43_in_initializer1155)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_initializer_list_in_initializer1157)
self.initializer_list()
self.following.pop()
if self.failed:
- return
+ return
# C.g:364:25: ( ',' )?
alt55 = 2
LA55_0 = self.input.LA(1)
@@ -4761,13 +4761,13 @@ class CParser(Parser):
# C.g:0:0: ','
self.match(self.input, 27, self.FOLLOW_27_in_initializer1159)
if self.failed:
- return
+ return
self.match(self.input, 44, self.FOLLOW_44_in_initializer1162)
if self.failed:
- return
+ return
@@ -4780,7 +4780,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end initializer
@@ -4793,7 +4793,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 35):
- return
+ return
# C.g:368:2: ( initializer ( ',' initializer )* )
# C.g:368:4: initializer ( ',' initializer )*
@@ -4801,7 +4801,7 @@ class CParser(Parser):
self.initializer()
self.following.pop()
if self.failed:
- return
+ return
# C.g:368:16: ( ',' initializer )*
while True: #loop57
alt57 = 2
@@ -4820,12 +4820,12 @@ class CParser(Parser):
# C.g:368:17: ',' initializer
self.match(self.input, 27, self.FOLLOW_27_in_initializer_list1176)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_initializer_in_initializer_list1178)
self.initializer()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -4845,7 +4845,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end initializer_list
@@ -4956,7 +4956,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 37):
- return
+ return
# C.g:378:2: ( ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )* )
# C.g:378:4: ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )*
@@ -4966,7 +4966,7 @@ class CParser(Parser):
self.multiplicative_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -4985,24 +4985,24 @@ class CParser(Parser):
# C.g:378:33: '+' multiplicative_expression
self.match(self.input, 68, self.FOLLOW_68_in_additive_expression1229)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1231)
self.multiplicative_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt61 == 2:
# C.g:378:65: '-' multiplicative_expression
self.match(self.input, 69, self.FOLLOW_69_in_additive_expression1235)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1237)
self.multiplicative_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -5022,7 +5022,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end additive_expression
@@ -5035,7 +5035,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 38):
- return
+ return
# C.g:382:2: ( ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )* )
# C.g:382:4: ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )*
@@ -5045,7 +5045,7 @@ class CParser(Parser):
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -5064,36 +5064,36 @@ class CParser(Parser):
# C.g:382:23: '*' cast_expression
self.match(self.input, 66, self.FOLLOW_66_in_multiplicative_expression1255)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1257)
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt62 == 2:
# C.g:382:45: '/' cast_expression
self.match(self.input, 70, self.FOLLOW_70_in_multiplicative_expression1261)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1263)
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt62 == 3:
# C.g:382:67: '%' cast_expression
self.match(self.input, 71, self.FOLLOW_71_in_multiplicative_expression1267)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1269)
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -5113,7 +5113,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end multiplicative_expression
@@ -5126,7 +5126,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 39):
- return
+ return
# C.g:386:2: ( '(' type_name ')' cast_expression | unary_expression )
alt63 = 2
@@ -5146,7 +5146,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 25, self.input)
@@ -5157,7 +5157,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 1, self.input)
@@ -5168,7 +5168,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 0, self.input)
@@ -5178,20 +5178,20 @@ class CParser(Parser):
# C.g:386:4: '(' type_name ')' cast_expression
self.match(self.input, 62, self.FOLLOW_62_in_cast_expression1282)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_type_name_in_cast_expression1284)
self.type_name()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_cast_expression1286)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_cast_expression_in_cast_expression1288)
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt63 == 2:
@@ -5200,7 +5200,7 @@ class CParser(Parser):
self.unary_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -5213,7 +5213,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end cast_expression
@@ -5226,7 +5226,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 40):
- return
+ return
# C.g:391:2: ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' )
alt64 = 6
@@ -5252,7 +5252,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 13, self.input)
@@ -5263,7 +5263,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 12, self.input)
@@ -5272,7 +5272,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 0, self.input)
@@ -5284,31 +5284,31 @@ class CParser(Parser):
self.postfix_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt64 == 2:
# C.g:392:4: '++' unary_expression
self.match(self.input, 72, self.FOLLOW_72_in_unary_expression1309)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_unary_expression_in_unary_expression1311)
self.unary_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt64 == 3:
# C.g:393:4: '--' unary_expression
self.match(self.input, 73, self.FOLLOW_73_in_unary_expression1316)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_unary_expression_in_unary_expression1318)
self.unary_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt64 == 4:
@@ -5317,42 +5317,42 @@ class CParser(Parser):
self.unary_operator()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_cast_expression_in_unary_expression1325)
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt64 == 5:
# C.g:395:4: 'sizeof' unary_expression
self.match(self.input, 74, self.FOLLOW_74_in_unary_expression1330)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_unary_expression_in_unary_expression1332)
self.unary_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt64 == 6:
# C.g:396:4: 'sizeof' '(' type_name ')'
self.match(self.input, 74, self.FOLLOW_74_in_unary_expression1337)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_unary_expression1339)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_type_name_in_unary_expression1341)
self.type_name()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_unary_expression1343)
if self.failed:
- return
+ return
@@ -5365,7 +5365,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end unary_expression
@@ -5385,13 +5385,13 @@ class CParser(Parser):
c = None
-
+
self.postfix_expression_stack[-1].FuncCallText = ''
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 41):
- return
+ return
# C.g:406:2: (p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* )
# C.g:406:6: p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
@@ -5399,7 +5399,7 @@ class CParser(Parser):
p = self.primary_expression()
self.following.pop()
if self.failed:
- return
+ return
if self.backtracking == 0:
self.postfix_expression_stack[-1].FuncCallText += self.input.toString(p.start, p.stop)
@@ -5461,26 +5461,26 @@ class CParser(Parser):
# C.g:407:13: '[' expression ']'
self.match(self.input, 64, self.FOLLOW_64_in_postfix_expression1383)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_postfix_expression1385)
self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 65, self.FOLLOW_65_in_postfix_expression1387)
if self.failed:
- return
+ return
elif alt65 == 2:
# C.g:408:13: '(' a= ')'
self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1401)
if self.failed:
- return
+ return
a = self.input.LT(1)
self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1405)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, a.line, a.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, '')
@@ -5490,16 +5490,16 @@ class CParser(Parser):
# C.g:409:13: '(' c= argument_expression_list b= ')'
self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1420)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_argument_expression_list_in_postfix_expression1424)
c = self.argument_expression_list()
self.following.pop()
if self.failed:
- return
+ return
b = self.input.LT(1)
self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1428)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, b.line, b.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, self.input.toString(c.start, c.stop))
@@ -5509,26 +5509,26 @@ class CParser(Parser):
# C.g:410:13: '(' macro_parameter_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1444)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_macro_parameter_list_in_postfix_expression1446)
self.macro_parameter_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1448)
if self.failed:
- return
+ return
elif alt65 == 5:
# C.g:411:13: '.' x= IDENTIFIER
self.match(self.input, 75, self.FOLLOW_75_in_postfix_expression1462)
if self.failed:
- return
+ return
x = self.input.LT(1)
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1466)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.postfix_expression_stack[-1].FuncCallText += '.' + x.text
@@ -5538,11 +5538,11 @@ class CParser(Parser):
# C.g:412:13: '*' y= IDENTIFIER
self.match(self.input, 66, self.FOLLOW_66_in_postfix_expression1482)
if self.failed:
- return
+ return
y = self.input.LT(1)
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1486)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.postfix_expression_stack[-1].FuncCallText = y.text
@@ -5552,11 +5552,11 @@ class CParser(Parser):
# C.g:413:13: '->' z= IDENTIFIER
self.match(self.input, 76, self.FOLLOW_76_in_postfix_expression1502)
if self.failed:
- return
+ return
z = self.input.LT(1)
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1506)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.postfix_expression_stack[-1].FuncCallText += '->' + z.text
@@ -5566,14 +5566,14 @@ class CParser(Parser):
# C.g:414:13: '++'
self.match(self.input, 72, self.FOLLOW_72_in_postfix_expression1522)
if self.failed:
- return
+ return
elif alt65 == 9:
# C.g:415:13: '--'
self.match(self.input, 73, self.FOLLOW_73_in_postfix_expression1536)
if self.failed:
- return
+ return
else:
@@ -5594,7 +5594,7 @@ class CParser(Parser):
self.postfix_expression_stack.pop()
pass
- return
+ return
# $ANTLR end postfix_expression
@@ -5607,7 +5607,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 42):
- return
+ return
# C.g:420:2: ( parameter_declaration ( ',' parameter_declaration )* )
# C.g:420:4: parameter_declaration ( ',' parameter_declaration )*
@@ -5615,7 +5615,7 @@ class CParser(Parser):
self.parameter_declaration()
self.following.pop()
if self.failed:
- return
+ return
# C.g:420:26: ( ',' parameter_declaration )*
while True: #loop66
alt66 = 2
@@ -5629,12 +5629,12 @@ class CParser(Parser):
# C.g:420:27: ',' parameter_declaration
self.match(self.input, 27, self.FOLLOW_27_in_macro_parameter_list1562)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_parameter_declaration_in_macro_parameter_list1564)
self.parameter_declaration()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -5654,7 +5654,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end macro_parameter_list
@@ -5667,7 +5667,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 43):
- return
+ return
# C.g:424:2: ( '&' | '*' | '+' | '-' | '~' | '!' )
# C.g:
@@ -5679,7 +5679,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -5701,7 +5701,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end unary_operator
@@ -5812,7 +5812,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 45):
- return
+ return
# C.g:439:5: ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL )
alt72 = 6
@@ -5832,7 +5832,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("438:1: constant : ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL );", 72, 0, self.input)
@@ -5842,28 +5842,28 @@ class CParser(Parser):
# C.g:439:9: HEX_LITERAL
self.match(self.input, HEX_LITERAL, self.FOLLOW_HEX_LITERAL_in_constant1643)
if self.failed:
- return
+ return
elif alt72 == 2:
# C.g:440:9: OCTAL_LITERAL
self.match(self.input, OCTAL_LITERAL, self.FOLLOW_OCTAL_LITERAL_in_constant1653)
if self.failed:
- return
+ return
elif alt72 == 3:
# C.g:441:9: DECIMAL_LITERAL
self.match(self.input, DECIMAL_LITERAL, self.FOLLOW_DECIMAL_LITERAL_in_constant1663)
if self.failed:
- return
+ return
elif alt72 == 4:
# C.g:442:7: CHARACTER_LITERAL
self.match(self.input, CHARACTER_LITERAL, self.FOLLOW_CHARACTER_LITERAL_in_constant1671)
if self.failed:
- return
+ return
elif alt72 == 5:
@@ -5907,7 +5907,7 @@ class CParser(Parser):
# C.g:0:0: IDENTIFIER
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1680)
if self.failed:
- return
+ return
else:
@@ -5933,7 +5933,7 @@ class CParser(Parser):
# C.g:0:0: STRING_LITERAL
self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_constant1683)
if self.failed:
- return
+ return
else:
@@ -5942,7 +5942,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(69, self.input)
raise eee
@@ -5958,7 +5958,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(70, self.input)
raise eee
@@ -5979,7 +5979,7 @@ class CParser(Parser):
# C.g:0:0: IDENTIFIER
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1688)
if self.failed:
- return
+ return
else:
@@ -5992,7 +5992,7 @@ class CParser(Parser):
# C.g:444:9: FLOATING_POINT_LITERAL
self.match(self.input, FLOATING_POINT_LITERAL, self.FOLLOW_FLOATING_POINT_LITERAL_in_constant1699)
if self.failed:
- return
+ return
@@ -6005,7 +6005,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end constant
@@ -6088,7 +6088,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 47):
- return
+ return
# C.g:454:2: ( conditional_expression )
# C.g:454:4: conditional_expression
@@ -6096,7 +6096,7 @@ class CParser(Parser):
self.conditional_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -6110,7 +6110,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end constant_expression
@@ -6123,7 +6123,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 48):
- return
+ return
# C.g:458:2: ( lvalue assignment_operator assignment_expression | conditional_expression )
alt74 = 2
@@ -6140,7 +6140,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 13, self.input)
@@ -6156,7 +6156,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 14, self.input)
@@ -6172,7 +6172,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 15, self.input)
@@ -6188,7 +6188,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 16, self.input)
@@ -6204,7 +6204,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 17, self.input)
@@ -6220,7 +6220,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 18, self.input)
@@ -6236,7 +6236,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 19, self.input)
@@ -6254,7 +6254,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 21, self.input)
@@ -6270,7 +6270,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 22, self.input)
@@ -6281,7 +6281,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 1, self.input)
@@ -6299,7 +6299,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 44, self.input)
@@ -6315,7 +6315,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 45, self.input)
@@ -6331,7 +6331,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 46, self.input)
@@ -6347,7 +6347,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 47, self.input)
@@ -6363,7 +6363,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 48, self.input)
@@ -6379,7 +6379,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 49, self.input)
@@ -6395,7 +6395,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 50, self.input)
@@ -6408,7 +6408,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 2, self.input)
@@ -6426,7 +6426,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 73, self.input)
@@ -6442,7 +6442,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 74, self.input)
@@ -6458,7 +6458,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 75, self.input)
@@ -6474,7 +6474,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 76, self.input)
@@ -6490,7 +6490,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 77, self.input)
@@ -6506,7 +6506,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 78, self.input)
@@ -6522,7 +6522,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 79, self.input)
@@ -6535,7 +6535,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 3, self.input)
@@ -6553,7 +6553,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 102, self.input)
@@ -6569,7 +6569,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 103, self.input)
@@ -6585,7 +6585,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 104, self.input)
@@ -6601,7 +6601,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 105, self.input)
@@ -6617,7 +6617,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 106, self.input)
@@ -6633,7 +6633,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 107, self.input)
@@ -6649,7 +6649,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 108, self.input)
@@ -6662,7 +6662,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 4, self.input)
@@ -6680,7 +6680,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 131, self.input)
@@ -6696,7 +6696,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 132, self.input)
@@ -6712,7 +6712,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 133, self.input)
@@ -6728,7 +6728,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 134, self.input)
@@ -6744,7 +6744,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 135, self.input)
@@ -6760,7 +6760,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 136, self.input)
@@ -6776,7 +6776,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 137, self.input)
@@ -6789,7 +6789,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 5, self.input)
@@ -6807,7 +6807,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 160, self.input)
@@ -6823,7 +6823,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 161, self.input)
@@ -6839,7 +6839,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 162, self.input)
@@ -6855,7 +6855,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 163, self.input)
@@ -6871,7 +6871,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 164, self.input)
@@ -6887,7 +6887,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 165, self.input)
@@ -6903,7 +6903,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 166, self.input)
@@ -6919,7 +6919,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 167, self.input)
@@ -6937,7 +6937,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 189, self.input)
@@ -6948,7 +6948,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 6, self.input)
@@ -6966,7 +6966,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 191, self.input)
@@ -6982,7 +6982,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 192, self.input)
@@ -6998,7 +6998,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 193, self.input)
@@ -7014,7 +7014,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 194, self.input)
@@ -7030,7 +7030,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 195, self.input)
@@ -7046,7 +7046,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 196, self.input)
@@ -7062,7 +7062,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 197, self.input)
@@ -7075,7 +7075,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 7, self.input)
@@ -7093,7 +7093,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 220, self.input)
@@ -7109,7 +7109,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 221, self.input)
@@ -7125,7 +7125,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 222, self.input)
@@ -7141,7 +7141,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 223, self.input)
@@ -7157,7 +7157,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 224, self.input)
@@ -7173,7 +7173,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 225, self.input)
@@ -7189,7 +7189,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 226, self.input)
@@ -7205,7 +7205,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 227, self.input)
@@ -7221,7 +7221,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 228, self.input)
@@ -7237,7 +7237,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 229, self.input)
@@ -7253,7 +7253,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 230, self.input)
@@ -7269,7 +7269,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 231, self.input)
@@ -7280,7 +7280,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 8, self.input)
@@ -7298,7 +7298,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 244, self.input)
@@ -7314,7 +7314,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 245, self.input)
@@ -7330,7 +7330,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 246, self.input)
@@ -7346,7 +7346,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 247, self.input)
@@ -7362,7 +7362,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 248, self.input)
@@ -7378,7 +7378,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 249, self.input)
@@ -7394,7 +7394,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 250, self.input)
@@ -7410,7 +7410,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 251, self.input)
@@ -7426,7 +7426,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 252, self.input)
@@ -7442,7 +7442,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 253, self.input)
@@ -7458,7 +7458,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 254, self.input)
@@ -7474,7 +7474,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 255, self.input)
@@ -7483,7 +7483,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 9, self.input)
@@ -7501,7 +7501,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 256, self.input)
@@ -7517,7 +7517,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 257, self.input)
@@ -7533,7 +7533,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 258, self.input)
@@ -7549,7 +7549,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 259, self.input)
@@ -7565,7 +7565,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 260, self.input)
@@ -7581,7 +7581,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 261, self.input)
@@ -7597,7 +7597,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 262, self.input)
@@ -7613,7 +7613,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 263, self.input)
@@ -7629,7 +7629,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 264, self.input)
@@ -7645,7 +7645,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 265, self.input)
@@ -7661,7 +7661,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 266, self.input)
@@ -7677,7 +7677,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 267, self.input)
@@ -7686,7 +7686,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 10, self.input)
@@ -7704,7 +7704,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 268, self.input)
@@ -7720,7 +7720,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 269, self.input)
@@ -7736,7 +7736,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 270, self.input)
@@ -7752,7 +7752,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 271, self.input)
@@ -7768,7 +7768,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 272, self.input)
@@ -7784,7 +7784,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 273, self.input)
@@ -7800,7 +7800,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 274, self.input)
@@ -7816,7 +7816,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 275, self.input)
@@ -7832,7 +7832,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 276, self.input)
@@ -7848,7 +7848,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 277, self.input)
@@ -7864,7 +7864,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 278, self.input)
@@ -7880,7 +7880,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 279, self.input)
@@ -7889,7 +7889,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 11, self.input)
@@ -7907,7 +7907,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 280, self.input)
@@ -7923,7 +7923,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 281, self.input)
@@ -7939,7 +7939,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 282, self.input)
@@ -7955,7 +7955,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 283, self.input)
@@ -7971,7 +7971,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 284, self.input)
@@ -7987,7 +7987,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 285, self.input)
@@ -8003,7 +8003,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 286, self.input)
@@ -8019,7 +8019,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 287, self.input)
@@ -8035,7 +8035,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 288, self.input)
@@ -8051,7 +8051,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 289, self.input)
@@ -8067,7 +8067,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 290, self.input)
@@ -8083,7 +8083,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 291, self.input)
@@ -8092,7 +8092,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 12, self.input)
@@ -8101,7 +8101,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 0, self.input)
@@ -8113,17 +8113,17 @@ class CParser(Parser):
self.lvalue()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_assignment_operator_in_assignment_expression1746)
self.assignment_operator()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_assignment_expression_in_assignment_expression1748)
self.assignment_expression()
self.following.pop()
if self.failed:
- return
+ return
elif alt74 == 2:
@@ -8132,7 +8132,7 @@ class CParser(Parser):
self.conditional_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -8145,7 +8145,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end assignment_expression
@@ -8158,7 +8158,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 49):
- return
+ return
# C.g:463:2: ( unary_expression )
# C.g:463:4: unary_expression
@@ -8166,7 +8166,7 @@ class CParser(Parser):
self.unary_expression()
self.following.pop()
if self.failed:
- return
+ return
@@ -8180,7 +8180,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end lvalue
@@ -8193,7 +8193,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 50):
- return
+ return
# C.g:467:2: ( '=' | '*=' | '/=' | '%=' | '+=' | '-=' | '<<=' | '>>=' | '&=' | '^=' | '|=' )
# C.g:
@@ -8205,7 +8205,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -8227,7 +8227,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end assignment_operator
@@ -8243,7 +8243,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 51):
- return
+ return
# C.g:481:2: (e= logical_or_expression ( '?' expression ':' conditional_expression )? )
# C.g:481:4: e= logical_or_expression ( '?' expression ':' conditional_expression )?
@@ -8251,7 +8251,7 @@ class CParser(Parser):
e = self.logical_or_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:481:28: ( '?' expression ':' conditional_expression )?
alt75 = 2
LA75_0 = self.input.LA(1)
@@ -8262,20 +8262,20 @@ class CParser(Parser):
# C.g:481:29: '?' expression ':' conditional_expression
self.match(self.input, 90, self.FOLLOW_90_in_conditional_expression1842)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_conditional_expression1844)
self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 47, self.FOLLOW_47_in_conditional_expression1846)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_conditional_expression_in_conditional_expression1848)
self.conditional_expression()
self.following.pop()
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@@ -8295,7 +8295,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end conditional_expression
@@ -8378,7 +8378,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 53):
- return
+ return
# C.g:489:2: ( inclusive_or_expression ( '&&' inclusive_or_expression )* )
# C.g:489:4: inclusive_or_expression ( '&&' inclusive_or_expression )*
@@ -8386,7 +8386,7 @@ class CParser(Parser):
self.inclusive_or_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:489:28: ( '&&' inclusive_or_expression )*
while True: #loop77
alt77 = 2
@@ -8400,12 +8400,12 @@ class CParser(Parser):
# C.g:489:29: '&&' inclusive_or_expression
self.match(self.input, 92, self.FOLLOW_92_in_logical_and_expression1884)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_inclusive_or_expression_in_logical_and_expression1886)
self.inclusive_or_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8425,7 +8425,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end logical_and_expression
@@ -8438,7 +8438,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 54):
- return
+ return
# C.g:493:2: ( exclusive_or_expression ( '|' exclusive_or_expression )* )
# C.g:493:4: exclusive_or_expression ( '|' exclusive_or_expression )*
@@ -8446,7 +8446,7 @@ class CParser(Parser):
self.exclusive_or_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:493:28: ( '|' exclusive_or_expression )*
while True: #loop78
alt78 = 2
@@ -8460,12 +8460,12 @@ class CParser(Parser):
# C.g:493:29: '|' exclusive_or_expression
self.match(self.input, 93, self.FOLLOW_93_in_inclusive_or_expression1902)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_exclusive_or_expression_in_inclusive_or_expression1904)
self.exclusive_or_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8485,7 +8485,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end inclusive_or_expression
@@ -8498,7 +8498,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 55):
- return
+ return
# C.g:497:2: ( and_expression ( '^' and_expression )* )
# C.g:497:4: and_expression ( '^' and_expression )*
@@ -8506,7 +8506,7 @@ class CParser(Parser):
self.and_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:497:19: ( '^' and_expression )*
while True: #loop79
alt79 = 2
@@ -8520,12 +8520,12 @@ class CParser(Parser):
# C.g:497:20: '^' and_expression
self.match(self.input, 94, self.FOLLOW_94_in_exclusive_or_expression1920)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_and_expression_in_exclusive_or_expression1922)
self.and_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8545,7 +8545,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end exclusive_or_expression
@@ -8558,7 +8558,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 56):
- return
+ return
# C.g:501:2: ( equality_expression ( '&' equality_expression )* )
# C.g:501:4: equality_expression ( '&' equality_expression )*
@@ -8566,7 +8566,7 @@ class CParser(Parser):
self.equality_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:501:24: ( '&' equality_expression )*
while True: #loop80
alt80 = 2
@@ -8580,12 +8580,12 @@ class CParser(Parser):
# C.g:501:25: '&' equality_expression
self.match(self.input, 77, self.FOLLOW_77_in_and_expression1938)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_equality_expression_in_and_expression1940)
self.equality_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8605,7 +8605,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end and_expression
@@ -8618,7 +8618,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 57):
- return
+ return
# C.g:504:2: ( relational_expression ( ( '==' | '!=' ) relational_expression )* )
# C.g:504:4: relational_expression ( ( '==' | '!=' ) relational_expression )*
@@ -8626,7 +8626,7 @@ class CParser(Parser):
self.relational_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:504:26: ( ( '==' | '!=' ) relational_expression )*
while True: #loop81
alt81 = 2
@@ -8646,7 +8646,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -8659,7 +8659,7 @@ class CParser(Parser):
self.relational_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8679,7 +8679,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end equality_expression
@@ -8692,7 +8692,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 58):
- return
+ return
# C.g:508:2: ( shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* )
# C.g:508:4: shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*
@@ -8700,7 +8700,7 @@ class CParser(Parser):
self.shift_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:508:21: ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*
while True: #loop82
alt82 = 2
@@ -8720,7 +8720,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -8733,7 +8733,7 @@ class CParser(Parser):
self.shift_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8753,7 +8753,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end relational_expression
@@ -8766,7 +8766,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 59):
- return
+ return
# C.g:512:2: ( additive_expression ( ( '<<' | '>>' ) additive_expression )* )
# C.g:512:4: additive_expression ( ( '<<' | '>>' ) additive_expression )*
@@ -8774,7 +8774,7 @@ class CParser(Parser):
self.additive_expression()
self.following.pop()
if self.failed:
- return
+ return
# C.g:512:24: ( ( '<<' | '>>' ) additive_expression )*
while True: #loop83
alt83 = 2
@@ -8794,7 +8794,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -8807,7 +8807,7 @@ class CParser(Parser):
self.additive_expression()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -8827,7 +8827,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end shift_expression
@@ -8840,7 +8840,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 60):
- return
+ return
# C.g:518:2: ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration )
alt84 = 11
@@ -8861,7 +8861,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 43, self.input)
@@ -8881,7 +8881,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 47, self.input)
@@ -8897,7 +8897,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 53, self.input)
@@ -8913,7 +8913,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 68, self.input)
@@ -8924,7 +8924,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 1, self.input)
@@ -8953,7 +8953,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 0, self.input)
@@ -8965,7 +8965,7 @@ class CParser(Parser):
self.labeled_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 2:
@@ -8974,7 +8974,7 @@ class CParser(Parser):
self.compound_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 3:
@@ -8983,7 +8983,7 @@ class CParser(Parser):
self.expression_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 4:
@@ -8992,7 +8992,7 @@ class CParser(Parser):
self.selection_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 5:
@@ -9001,7 +9001,7 @@ class CParser(Parser):
self.iteration_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 6:
@@ -9010,7 +9010,7 @@ class CParser(Parser):
self.jump_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 7:
@@ -9019,7 +9019,7 @@ class CParser(Parser):
self.macro_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 8:
@@ -9028,7 +9028,7 @@ class CParser(Parser):
self.asm2_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 9:
@@ -9037,7 +9037,7 @@ class CParser(Parser):
self.asm1_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 10:
@@ -9046,7 +9046,7 @@ class CParser(Parser):
self.asm_statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt84 == 11:
@@ -9055,7 +9055,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
@@ -9068,7 +9068,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end statement
@@ -9081,7 +9081,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 61):
- return
+ return
# C.g:532:2: ( ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';' )
# C.g:532:4: ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';'
@@ -9095,16 +9095,16 @@ class CParser(Parser):
# C.g:0:0: '__asm__'
self.match(self.input, 103, self.FOLLOW_103_in_asm2_statement2086)
if self.failed:
- return
+ return
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_asm2_statement2089)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_asm2_statement2091)
if self.failed:
- return
+ return
# C.g:532:30: (~ ( ';' ) )*
while True: #loop86
alt86 = 2
@@ -9131,7 +9131,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -9148,10 +9148,10 @@ class CParser(Parser):
self.match(self.input, 63, self.FOLLOW_63_in_asm2_statement2101)
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_asm2_statement2103)
if self.failed:
- return
+ return
@@ -9165,7 +9165,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end asm2_statement
@@ -9178,16 +9178,16 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 62):
- return
+ return
# C.g:536:2: ( '_asm' '{' (~ ( '}' ) )* '}' )
# C.g:536:4: '_asm' '{' (~ ( '}' ) )* '}'
self.match(self.input, 104, self.FOLLOW_104_in_asm1_statement2115)
if self.failed:
- return
+ return
self.match(self.input, 43, self.FOLLOW_43_in_asm1_statement2117)
if self.failed:
- return
+ return
# C.g:536:15: (~ ( '}' ) )*
while True: #loop87
alt87 = 2
@@ -9207,7 +9207,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -9224,7 +9224,7 @@ class CParser(Parser):
self.match(self.input, 44, self.FOLLOW_44_in_asm1_statement2127)
if self.failed:
- return
+ return
@@ -9238,7 +9238,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end asm1_statement
@@ -9251,16 +9251,16 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 63):
- return
+ return
# C.g:540:2: ( '__asm' '{' (~ ( '}' ) )* '}' )
# C.g:540:4: '__asm' '{' (~ ( '}' ) )* '}'
self.match(self.input, 105, self.FOLLOW_105_in_asm_statement2138)
if self.failed:
- return
+ return
self.match(self.input, 43, self.FOLLOW_43_in_asm_statement2140)
if self.failed:
- return
+ return
# C.g:540:16: (~ ( '}' ) )*
while True: #loop88
alt88 = 2
@@ -9280,7 +9280,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
@@ -9297,7 +9297,7 @@ class CParser(Parser):
self.match(self.input, 44, self.FOLLOW_44_in_asm_statement2150)
if self.failed:
- return
+ return
@@ -9311,7 +9311,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end asm_statement
@@ -9324,16 +9324,16 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 64):
- return
+ return
# C.g:544:2: ( IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')' )
# C.g:544:4: IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')'
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_macro_statement2162)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_macro_statement2164)
if self.failed:
- return
+ return
# C.g:544:19: ( declaration )*
while True: #loop89
alt89 = 2
@@ -11235,7 +11235,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -12441,7 +12441,7 @@ class CParser(Parser):
self.statement_list()
self.following.pop()
if self.failed:
- return
+ return
@@ -12457,13 +12457,13 @@ class CParser(Parser):
self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_macro_statement2176)
if self.failed:
- return
+ return
@@ -12477,7 +12477,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end macro_statement
@@ -12490,7 +12490,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 65):
- return
+ return
# C.g:548:2: ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement )
alt92 = 3
@@ -12504,7 +12504,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("547:1: labeled_statement : ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement );", 92, 0, self.input)
@@ -12514,50 +12514,50 @@ class CParser(Parser):
# C.g:548:4: IDENTIFIER ':' statement
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_labeled_statement2188)
if self.failed:
- return
+ return
self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2190)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_labeled_statement2192)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt92 == 2:
# C.g:549:4: 'case' constant_expression ':' statement
self.match(self.input, 106, self.FOLLOW_106_in_labeled_statement2197)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_constant_expression_in_labeled_statement2199)
self.constant_expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2201)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_labeled_statement2203)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
elif alt92 == 3:
# C.g:550:4: 'default' ':' statement
self.match(self.input, 107, self.FOLLOW_107_in_labeled_statement2208)
if self.failed:
- return
+ return
self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2210)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_labeled_statement2212)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
@@ -12570,7 +12570,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end labeled_statement
@@ -14553,7 +14553,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 67):
- return
+ return
# C.g:558:2: ( ( statement )+ )
# C.g:558:4: ( statement )+
@@ -16231,7 +16231,7 @@ class CParser(Parser):
self.statement()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -16240,7 +16240,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(95, self.input)
raise eee
@@ -16261,7 +16261,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end statement_list
@@ -16348,7 +16348,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 69):
- return
+ return
# C.g:567:2: ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement )
alt98 = 2
@@ -16361,7 +16361,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("566:1: selection_statement : ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement );", 98, 0, self.input)
@@ -16371,18 +16371,18 @@ class CParser(Parser):
# C.g:567:4: 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )?
self.match(self.input, 108, self.FOLLOW_108_in_selection_statement2272)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2274)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_selection_statement2278)
e = self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_selection_statement2280)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@@ -16390,7 +16390,7 @@ class CParser(Parser):
self.statement()
self.following.pop()
if self.failed:
- return
+ return
# C.g:567:167: ( options {k=1; backtrack=false; } : 'else' statement )?
alt97 = 2
LA97_0 = self.input.LA(1)
@@ -16401,12 +16401,12 @@ class CParser(Parser):
# C.g:567:200: 'else' statement
self.match(self.input, 109, self.FOLLOW_109_in_selection_statement2299)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_selection_statement2301)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
@@ -16416,23 +16416,23 @@ class CParser(Parser):
# C.g:568:4: 'switch' '(' expression ')' statement
self.match(self.input, 110, self.FOLLOW_110_in_selection_statement2308)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2310)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_selection_statement2312)
self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_selection_statement2314)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_selection_statement2316)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
@@ -16445,7 +16445,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end selection_statement
@@ -16461,7 +16461,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 70):
- return
+ return
# C.g:572:2: ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement )
alt100 = 3
@@ -16475,7 +16475,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("571:1: iteration_statement : ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement );", 100, 0, self.input)
@@ -16485,23 +16485,23 @@ class CParser(Parser):
# C.g:572:4: 'while' '(' e= expression ')' statement
self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2327)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2329)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_iteration_statement2333)
e = self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2335)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_iteration_statement2337)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@@ -16511,29 +16511,29 @@ class CParser(Parser):
# C.g:573:4: 'do' statement 'while' '(' e= expression ')' ';'
self.match(self.input, 112, self.FOLLOW_112_in_iteration_statement2344)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_iteration_statement2346)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2348)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2350)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_iteration_statement2354)
e = self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2356)
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_iteration_statement2358)
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@@ -16543,20 +16543,20 @@ class CParser(Parser):
# C.g:574:4: 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement
self.match(self.input, 113, self.FOLLOW_113_in_iteration_statement2365)
if self.failed:
- return
+ return
self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2367)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2369)
self.expression_statement()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2373)
e = self.expression_statement()
self.following.pop()
if self.failed:
- return
+ return
# C.g:574:58: ( expression )?
alt99 = 2
LA99_0 = self.input.LA(1)
@@ -16569,18 +16569,18 @@ class CParser(Parser):
self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2378)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_statement_in_iteration_statement2380)
self.statement()
self.following.pop()
if self.failed:
- return
+ return
if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@@ -16596,7 +16596,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end iteration_statement
@@ -16609,7 +16609,7 @@ class CParser(Parser):
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 71):
- return
+ return
# C.g:578:2: ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' )
alt101 = 5
@@ -16630,7 +16630,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 4, self.input)
@@ -16639,7 +16639,7 @@ class CParser(Parser):
else:
if self.backtracking > 0:
self.failed = True
- return
+ return
nvae = NoViableAltException("577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 0, self.input)
@@ -16649,58 +16649,58 @@ class CParser(Parser):
# C.g:578:4: 'goto' IDENTIFIER ';'
self.match(self.input, 114, self.FOLLOW_114_in_jump_statement2393)
if self.failed:
- return
+ return
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_jump_statement2395)
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2397)
if self.failed:
- return
+ return
elif alt101 == 2:
# C.g:579:4: 'continue' ';'
self.match(self.input, 115, self.FOLLOW_115_in_jump_statement2402)
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2404)
if self.failed:
- return
+ return
elif alt101 == 3:
# C.g:580:4: 'break' ';'
self.match(self.input, 116, self.FOLLOW_116_in_jump_statement2409)
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2411)
if self.failed:
- return
+ return
elif alt101 == 4:
# C.g:581:4: 'return' ';'
self.match(self.input, 117, self.FOLLOW_117_in_jump_statement2416)
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2418)
if self.failed:
- return
+ return
elif alt101 == 5:
# C.g:582:4: 'return' expression ';'
self.match(self.input, 117, self.FOLLOW_117_in_jump_statement2423)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_expression_in_jump_statement2425)
self.expression()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2427)
if self.failed:
- return
+ return
@@ -16713,7 +16713,7 @@ class CParser(Parser):
pass
- return
+ return
# $ANTLR end jump_statement
@@ -16725,7 +16725,7 @@ class CParser(Parser):
self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred2
@@ -16856,7 +16856,7 @@ class CParser(Parser):
self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
@@ -16864,7 +16864,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
# C.g:119:41: ( declaration )*
while True: #loop103
alt103 = 2
@@ -16880,7 +16880,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -16889,7 +16889,7 @@ class CParser(Parser):
self.match(self.input, 43, self.FOLLOW_43_in_synpred4108)
if self.failed:
- return
+ return
# $ANTLR end synpred4
@@ -16904,7 +16904,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred5
@@ -16919,7 +16919,7 @@ class CParser(Parser):
self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred7
@@ -16934,7 +16934,7 @@ class CParser(Parser):
self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred10
@@ -16949,7 +16949,7 @@ class CParser(Parser):
self.type_specifier()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred14
@@ -16964,7 +16964,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred15
@@ -16979,7 +16979,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred33
@@ -16992,7 +16992,7 @@ class CParser(Parser):
# C.g:225:5: IDENTIFIER ( type_qualifier )* declarator
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred34442)
if self.failed:
- return
+ return
# C.g:225:16: ( type_qualifier )*
while True: #loop106
alt106 = 2
@@ -17027,7 +17027,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -17038,7 +17038,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred34
@@ -17053,7 +17053,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred39
@@ -17068,7 +17068,7 @@ class CParser(Parser):
self.type_specifier()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred40
@@ -17091,7 +17091,7 @@ class CParser(Parser):
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
@@ -17105,7 +17105,7 @@ class CParser(Parser):
# C.g:297:14: 'EFIAPI'
self.match(self.input, 58, self.FOLLOW_58_in_synpred66788)
if self.failed:
- return
+ return
@@ -17119,7 +17119,7 @@ class CParser(Parser):
# C.g:297:26: 'EFI_BOOTSERVICE'
self.match(self.input, 59, self.FOLLOW_59_in_synpred66793)
if self.failed:
- return
+ return
@@ -17133,7 +17133,7 @@ class CParser(Parser):
# C.g:297:47: 'EFI_RUNTIMESERVICE'
self.match(self.input, 60, self.FOLLOW_60_in_synpred66798)
if self.failed:
- return
+ return
@@ -17141,7 +17141,7 @@ class CParser(Parser):
self.direct_declarator()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred66
@@ -17156,7 +17156,7 @@ class CParser(Parser):
self.declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred67
@@ -17169,7 +17169,7 @@ class CParser(Parser):
# C.g:304:9: 'EFIAPI'
self.match(self.input, 58, self.FOLLOW_58_in_synpred69830)
if self.failed:
- return
+ return
# $ANTLR end synpred69
@@ -17184,7 +17184,7 @@ class CParser(Parser):
self.declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred70
@@ -17197,15 +17197,15 @@ class CParser(Parser):
# C.g:310:9: '(' parameter_type_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred73878)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_parameter_type_list_in_synpred73880)
self.parameter_type_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_synpred73882)
if self.failed:
- return
+ return
# $ANTLR end synpred73
@@ -17218,15 +17218,15 @@ class CParser(Parser):
# C.g:311:9: '(' identifier_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred74892)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_identifier_list_in_synpred74894)
self.identifier_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_synpred74896)
if self.failed:
- return
+ return
# $ANTLR end synpred74
@@ -17241,7 +17241,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred75
@@ -17256,7 +17256,7 @@ class CParser(Parser):
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred76
@@ -17269,7 +17269,7 @@ class CParser(Parser):
# C.g:316:4: '*' ( type_qualifier )+ ( pointer )?
self.match(self.input, 66, self.FOLLOW_66_in_synpred77919)
if self.failed:
- return
+ return
# C.g:316:8: ( type_qualifier )+
cnt116 = 0
while True: #loop116
@@ -17286,7 +17286,7 @@ class CParser(Parser):
self.type_qualifier()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -17295,7 +17295,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(116, self.input)
raise eee
@@ -17315,7 +17315,7 @@ class CParser(Parser):
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
@@ -17331,12 +17331,12 @@ class CParser(Parser):
# C.g:317:4: '*' pointer
self.match(self.input, 66, self.FOLLOW_66_in_synpred78930)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_pointer_in_synpred78932)
self.pointer()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred78
@@ -17349,7 +17349,7 @@ class CParser(Parser):
# C.g:326:32: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_synpred81977)
if self.failed:
- return
+ return
# $ANTLR end synpred81
@@ -17362,7 +17362,7 @@ class CParser(Parser):
# C.g:326:27: ',' ( 'OPTIONAL' )? parameter_declaration
self.match(self.input, 27, self.FOLLOW_27_in_synpred82974)
if self.failed:
- return
+ return
# C.g:326:31: ( 'OPTIONAL' )?
alt119 = 2
LA119_0 = self.input.LA(1)
@@ -17376,7 +17376,7 @@ class CParser(Parser):
# C.g:326:32: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_synpred82977)
if self.failed:
- return
+ return
@@ -17384,7 +17384,7 @@ class CParser(Parser):
self.parameter_declaration()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred82
@@ -17399,7 +17399,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred83
@@ -17414,7 +17414,7 @@ class CParser(Parser):
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred84
@@ -17429,7 +17429,7 @@ class CParser(Parser):
self.declaration_specifiers()
self.following.pop()
if self.failed:
- return
+ return
# C.g:330:27: ( declarator | abstract_declarator )*
while True: #loop120
alt120 = 3
@@ -17513,7 +17513,7 @@ class CParser(Parser):
self.declarator()
self.following.pop()
if self.failed:
- return
+ return
elif alt120 == 2:
@@ -17522,7 +17522,7 @@ class CParser(Parser):
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
else:
@@ -17539,7 +17539,7 @@ class CParser(Parser):
# C.g:330:62: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_synpred861004)
if self.failed:
- return
+ return
@@ -17557,7 +17557,7 @@ class CParser(Parser):
self.specifier_qualifier_list()
self.following.pop()
if self.failed:
- return
+ return
# C.g:341:29: ( abstract_declarator )?
alt122 = 2
LA122_0 = self.input.LA(1)
@@ -17570,7 +17570,7 @@ class CParser(Parser):
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
@@ -17588,7 +17588,7 @@ class CParser(Parser):
self.direct_abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred91
@@ -17601,15 +17601,15 @@ class CParser(Parser):
# C.g:351:6: '(' abstract_declarator ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred931086)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_abstract_declarator_in_synpred931088)
self.abstract_declarator()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_synpred931090)
if self.failed:
- return
+ return
# $ANTLR end synpred93
@@ -17624,7 +17624,7 @@ class CParser(Parser):
self.abstract_declarator_suffix()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred94
@@ -17637,20 +17637,20 @@ class CParser(Parser):
# C.g:386:4: '(' type_name ')' cast_expression
self.match(self.input, 62, self.FOLLOW_62_in_synpred1091282)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_type_name_in_synpred1091284)
self.type_name()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_synpred1091286)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_cast_expression_in_synpred1091288)
self.cast_expression()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred109
@@ -17663,12 +17663,12 @@ class CParser(Parser):
# C.g:395:4: 'sizeof' unary_expression
self.match(self.input, 74, self.FOLLOW_74_in_synpred1141330)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_unary_expression_in_synpred1141332)
self.unary_expression()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred114
@@ -17681,15 +17681,15 @@ class CParser(Parser):
# C.g:409:13: '(' argument_expression_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred1171420)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_argument_expression_list_in_synpred1171424)
self.argument_expression_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_synpred1171428)
if self.failed:
- return
+ return
# $ANTLR end synpred117
@@ -17702,15 +17702,15 @@ class CParser(Parser):
# C.g:410:13: '(' macro_parameter_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred1181444)
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_macro_parameter_list_in_synpred1181446)
self.macro_parameter_list()
self.following.pop()
if self.failed:
- return
+ return
self.match(self.input, 63, self.FOLLOW_63_in_synpred1181448)
if self.failed:
- return
+ return
# $ANTLR end synpred118
@@ -17723,10 +17723,10 @@ class CParser(Parser):
# C.g:412:13: '*' IDENTIFIER
self.match(self.input, 66, self.FOLLOW_66_in_synpred1201482)
if self.failed:
- return
+ return
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1201486)
if self.failed:
- return
+ return
# $ANTLR end synpred120
@@ -17739,7 +17739,7 @@ class CParser(Parser):
# C.g:443:20: STRING_LITERAL
self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1371683)
if self.failed:
- return
+ return
# $ANTLR end synpred137
@@ -17763,7 +17763,7 @@ class CParser(Parser):
# C.g:0:0: IDENTIFIER
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1381680)
if self.failed:
- return
+ return
else:
@@ -17784,7 +17784,7 @@ class CParser(Parser):
# C.g:0:0: STRING_LITERAL
self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1381683)
if self.failed:
- return
+ return
else:
@@ -17793,7 +17793,7 @@ class CParser(Parser):
if self.backtracking > 0:
self.failed = True
- return
+ return
eee = EarlyExitException(126, self.input)
raise eee
@@ -17815,17 +17815,17 @@ class CParser(Parser):
self.lvalue()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_assignment_operator_in_synpred1421746)
self.assignment_operator()
self.following.pop()
if self.failed:
- return
+ return
self.following.append(self.FOLLOW_assignment_expression_in_synpred1421748)
self.assignment_expression()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred142
@@ -17840,7 +17840,7 @@ class CParser(Parser):
self.expression_statement()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred169
@@ -17855,7 +17855,7 @@ class CParser(Parser):
self.macro_statement()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred173
@@ -17870,7 +17870,7 @@ class CParser(Parser):
self.asm2_statement()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred174
@@ -17885,7 +17885,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred181
@@ -17900,7 +17900,7 @@ class CParser(Parser):
self.statement_list()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred182
@@ -17915,7 +17915,7 @@ class CParser(Parser):
self.declaration()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred186
@@ -17930,7 +17930,7 @@ class CParser(Parser):
self.statement()
self.following.pop()
if self.failed:
- return
+ return
# $ANTLR end synpred188
@@ -18389,7 +18389,7 @@ class CParser(Parser):
-
+
FOLLOW_external_declaration_in_translation_unit74 = frozenset([1, 4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
FOLLOW_function_definition_in_external_declaration113 = frozenset([1])
diff --git a/BaseTools/Source/Python/Eot/Eot.py b/BaseTools/Source/Python/Eot/Eot.py
index dfd1146af7..297847cdab 100644
--- a/BaseTools/Source/Python/Eot/Eot.py
+++ b/BaseTools/Source/Python/Eot/Eot.py
@@ -1491,7 +1491,7 @@ class MultipleFv(FirmwareVolume):
Fv.frombuffer(Buf, 0, len(Buf))
self.BasicInfo.append([Fv.Name, Fv.FileSystemGuid, Fv.Size])
- self.FfsDict.append(Fv.FfsDict)
+ self.FfsDict.append(Fv.FfsDict)
## Class Eot
#
@@ -1510,7 +1510,7 @@ class Eot(object):
# Version and Copyright
self.VersionNumber = ("0.02" + " " + gBUILD_VERSION)
self.Version = "%prog Version " + self.VersionNumber
- self.Copyright = "Copyright (c) 2008 - 2010, Intel Corporation All rights reserved."
+ self.Copyright = "Copyright (c) 2008 - 2018, Intel Corporation All rights reserved."
self.Report = Report
self.IsInit = IsInit
@@ -1522,7 +1522,7 @@ class Eot(object):
self.FvFileList = FvFileList
self.MapFileList = MapFileList
self.Dispatch = Dispatch
-
+
# Check workspace environment
if "EFI_SOURCE" not in os.environ:
if "EDK_SOURCE" not in os.environ:
@@ -1562,13 +1562,13 @@ class Eot(object):
if not os.path.isfile(MapFile):
EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "Can not find file %s " % MapFile)
EotGlobalData.gMAP_FILE.append(MapFile)
-
+
# Generate source file list
self.GenerateSourceFileList(self.SourceFileList, self.IncludeDirList)
# Generate guid list of dec file list
self.ParseDecFile(self.DecFileList)
-
+
# Generate guid list from GUID list file
self.ParseGuidList(self.GuidList)
@@ -1628,7 +1628,7 @@ class Eot(object):
if len(list) == 2:
EotGlobalData.gGuidDict[list[0].strip()] = GuidStructureStringToGuidString(list[1].strip())
-
+
## ParseGuidList() method
#
# Parse Guid list and get all GUID names with GUID values as {GuidName : GuidValue}
@@ -1643,7 +1643,7 @@ class Eot(object):
for Line in open(Path):
(GuidName, GuidValue) = Line.split()
EotGlobalData.gGuidDict[GuidName] = GuidValue
-
+
## ConvertLogFile() method
#
# Parse a real running log file to get real dispatch order
@@ -1999,7 +1999,7 @@ class Eot(object):
if Options.FvFileList:
self.FvFileList = Options.FvFileList
-
+
if Options.MapFileList:
self.MapFileList = Options.FvMapFileList
@@ -2011,7 +2011,7 @@ class Eot(object):
if Options.DecFileList:
self.DecFileList = Options.DecFileList
-
+
if Options.GuidList:
self.GuidList = Options.GuidList
diff --git a/BaseTools/Source/Python/Eot/ParserWarning.py b/BaseTools/Source/Python/Eot/ParserWarning.py
index 2e3f5e4983..d121598153 100644
--- a/BaseTools/Source/Python/Eot/ParserWarning.py
+++ b/BaseTools/Source/Python/Eot/ParserWarning.py
@@ -1,7 +1,7 @@
## @file
# Warning information of Eot
#
-# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -23,4 +23,4 @@ class Warning (Exception):
self.message = Str
self.FileName = File
self.LineNumber = Line
- self.ToolName = 'EOT' \ No newline at end of file
+ self.ToolName = 'EOT'
diff --git a/BaseTools/Source/Python/Eot/Report.py b/BaseTools/Source/Python/Eot/Report.py
index 7435b4d7c9..e9716c988c 100644
--- a/BaseTools/Source/Python/Eot/Report.py
+++ b/BaseTools/Source/Python/Eot/Report.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create report for Eot tool
#
-# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -276,13 +276,13 @@ class Report(object):
</tr>
<tr id='Ffs%s' style='display:none;'>
<td colspan="4"><table width="100%%" border="1">""" % (self.FfsIndex, self.FfsIndex, self.FfsIndex, FfsPath, FfsName, FfsGuid, FfsOffset, FfsType, self.FfsIndex)
-
+
if self.DispatchList:
if FfsObj.Type in [0x04, 0x06]:
self.DispatchList.write("%s %s %s %s\n" % (FfsGuid, "P", FfsName, FfsPath))
if FfsObj.Type in [0x05, 0x07, 0x08, 0x0A]:
self.DispatchList.write("%s %s %s %s\n" % (FfsGuid, "D", FfsName, FfsPath))
-
+
self.WriteLn(Content)
EotGlobalData.gOP_DISPATCH_ORDER.write('%s\n' %FfsName)
diff --git a/BaseTools/Source/Python/GenFds/Attribute.py b/BaseTools/Source/Python/GenFds/Attribute.py
index 17761f2e24..97b4ae01c6 100644
--- a/BaseTools/Source/Python/GenFds/Attribute.py
+++ b/BaseTools/Source/Python/GenFds/Attribute.py
@@ -1,7 +1,7 @@
## @file
# name value pair
#
-# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -25,4 +25,4 @@ class Attribute:
# @param self The object pointer
def __init__(self):
self.Name = None
- self.Value = None \ No newline at end of file
+ self.Value = None
diff --git a/BaseTools/Source/Python/GenFds/Capsule.py b/BaseTools/Source/Python/GenFds/Capsule.py
index b02661d998..35a25bd380 100644
--- a/BaseTools/Source/Python/GenFds/Capsule.py
+++ b/BaseTools/Source/Python/GenFds/Capsule.py
@@ -205,7 +205,7 @@ class Capsule (CapsuleClassObject) :
return GenFds.ImageBinDict[self.UiCapsuleName.upper() + 'cap']
GenFdsGlobalVariable.InfLogger( "\nGenerate %s Capsule" %self.UiCapsuleName)
- if ('CAPSULE_GUID' in self.TokensDict and
+ if ('CAPSULE_GUID' in self.TokensDict and
uuid.UUID(self.TokensDict['CAPSULE_GUID']) == uuid.UUID('6DCBD5ED-E82D-4C44-BDA1-7194199AD92A')):
return self.GenFmpCapsule()
diff --git a/BaseTools/Source/Python/GenFds/CapsuleData.py b/BaseTools/Source/Python/GenFds/CapsuleData.py
index 83b2731110..9d17bf5afe 100644
--- a/BaseTools/Source/Python/GenFds/CapsuleData.py
+++ b/BaseTools/Source/Python/GenFds/CapsuleData.py
@@ -32,13 +32,13 @@ class CapsuleData:
# @param self The object pointer
def __init__(self):
pass
-
+
## generate capsule data
#
# @param self The object pointer
def GenCapsuleSubItem(self):
pass
-
+
## FFS class for capsule data
#
#
@@ -119,7 +119,7 @@ class CapsuleFd (CapsuleData):
else:
FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName)
return FdFile
-
+
## AnyFile class for capsule data
#
#
@@ -139,7 +139,7 @@ class CapsuleAnyFile (CapsuleData):
#
def GenCapsuleSubItem(self):
return self.FileName
-
+
## Afile class for capsule data
#
#
@@ -208,11 +208,11 @@ class CapsulePayload(CapsuleData):
Guid = self.ImageTypeId.split('-')
Buffer = pack('=ILHHBBBBBBBBBBBBIIQ',
int(self.Version, 16),
- int(Guid[0], 16),
- int(Guid[1], 16),
- int(Guid[2], 16),
- int(Guid[3][-4:-2], 16),
- int(Guid[3][-2:], 16),
+ int(Guid[0], 16),
+ int(Guid[1], 16),
+ int(Guid[2], 16),
+ int(Guid[3][-4:-2], 16),
+ int(Guid[3][-2:], 16),
int(Guid[4][-12:-10], 16),
int(Guid[4][-10:-8], 16),
int(Guid[4][-8:-6], 16),
diff --git a/BaseTools/Source/Python/GenFds/EfiSection.py b/BaseTools/Source/Python/GenFds/EfiSection.py
index 8ac37dd96b..9223268749 100644
--- a/BaseTools/Source/Python/GenFds/EfiSection.py
+++ b/BaseTools/Source/Python/GenFds/EfiSection.py
@@ -55,7 +55,7 @@ class EfiSection (EfiSectionClassObject):
# @retval tuple (Generated file name list, section alignment)
#
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False) :
-
+
if self.FileName is not None and self.FileName.startswith('PCD('):
self.FileName = GenFdsGlobalVariable.GetPcdValue(self.FileName)
"""Prepare the parameter of GenSection"""
@@ -155,7 +155,7 @@ class EfiSection (EfiSectionClassObject):
BuildNumTuple = tuple()
BuildNumString = ' ' + ' '.join(BuildNumTuple)
- #if VerString == '' and
+ #if VerString == '' and
if BuildNumString == '':
if self.Optional == True :
GenFdsGlobalVariable.VerboseLogger( "Optional Section don't exist!")
@@ -240,7 +240,7 @@ class EfiSection (EfiSectionClassObject):
Num = '%s.%d' %(SecNum, Index)
OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + Num + Ffs.SectionSuffix.get(SectionType))
File = GenFdsGlobalVariable.MacroExtend(File, Dict)
-
+
#Get PE Section alignment when align is set to AUTO
if self.Alignment == 'Auto' and (SectionType == BINARY_FILE_TYPE_PE32 or SectionType == BINARY_FILE_TYPE_TE):
ImageObj = PeImageClass (File)
@@ -284,7 +284,7 @@ class EfiSection (EfiSectionClassObject):
IsMakefile = IsMakefile
)
File = StrippedFile
-
+
"""For TE Section call GenFw to generate TE image"""
if SectionType == BINARY_FILE_TYPE_TE:
diff --git a/BaseTools/Source/Python/GenFds/Fd.py b/BaseTools/Source/Python/GenFds/Fd.py
index 3305a470ed..552719fa31 100644
--- a/BaseTools/Source/Python/GenFds/Fd.py
+++ b/BaseTools/Source/Python/GenFds/Fd.py
@@ -103,7 +103,7 @@ class FD(FDClassObject):
pass
GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFds.ImageBinDict, self.vtfRawDict, self.DefineVarDict)
-
+
FdBuffer = BytesIO('')
PreviousRegionStart = -1
PreviousRegionSize = 1
diff --git a/BaseTools/Source/Python/GenFds/FdfParser.py b/BaseTools/Source/Python/GenFds/FdfParser.py
index b57ffc778f..67217c3b89 100644
--- a/BaseTools/Source/Python/GenFds/FdfParser.py
+++ b/BaseTools/Source/Python/GenFds/FdfParser.py
@@ -173,7 +173,7 @@ class IncludeFileProfile :
self.InsertAdjust = 0
self.IncludeFileList = []
self.Level = 1 # first level include file
-
+
def GetTotalLines(self):
TotalLines = self.InsertAdjust + len(self.FileLinesList)
@@ -191,7 +191,7 @@ class IncludeFileProfile :
def GetLineInFile(self, Line):
if not self.IsLineInFile (Line):
return (self.FileName, -1)
-
+
InsertedLines = self.InsertStartLineNumber
for Profile in self.IncludeFileList:
@@ -233,7 +233,7 @@ class FileProfile :
# ECC will use this Dict and List information
self.PcdFileLineDict = {}
self.InfFileLineList = []
-
+
self.FdDict = {}
self.FdNameNotSet = False
self.FvDict = {}
@@ -339,11 +339,11 @@ class FdfParser:
#
# @param self The object pointer
# @param DestLine Optional new destination line number.
- # @param DestOffset Optional new destination offset.
+ # @param DestOffset Optional new destination offset.
#
- def Rewind(self, DestLine = 1, DestOffset = 0):
- self.CurrentLineNumber = DestLine
- self.CurrentOffsetWithinLine = DestOffset
+ def Rewind(self, DestLine = 1, DestOffset = 0):
+ self.CurrentLineNumber = DestLine
+ self.CurrentOffsetWithinLine = DestOffset
## __UndoOneChar() method
#
@@ -459,7 +459,7 @@ class FdfParser:
if MacroName.startswith('!'):
NotFlag = True
MacroName = MacroName[1:].strip()
-
+
if not MacroName.startswith('$(') or not MacroName.endswith(')'):
raise Warning("Macro name expected(Please use '$(%(Token)s)' if '%(Token)s' is a macro.)" % {"Token" : MacroName},
self.FileName, self.CurrentLineNumber)
@@ -599,7 +599,7 @@ class FdfParser:
# @param self The object pointer
#
def PreprocessIncludeFile(self):
- # nested include support
+ # nested include support
Processed = False
MacroDict = {}
while self.__GetNextToken():
@@ -664,7 +664,7 @@ class FdfParser:
IncludedFile1 = PathClass(IncludedFile, GlobalData.gWorkspace)
ErrorCode = IncludedFile1.Validate()[0]
if ErrorCode != 0:
- raise Warning("The include file does not exist under below directories: \n%s\n%s\n%s\n"%(os.path.dirname(self.FileName), PlatformDir, GlobalData.gWorkspace),
+ raise Warning("The include file does not exist under below directories: \n%s\n%s\n%s\n"%(os.path.dirname(self.FileName), PlatformDir, GlobalData.gWorkspace),
self.FileName, self.CurrentLineNumber)
if not IsValidInclude (IncludedFile1.Path, self.CurrentLineNumber):
@@ -707,18 +707,18 @@ class FdfParser:
Processed = False
# Preprocess done.
self.Rewind()
-
+
@staticmethod
def __GetIfListCurrentItemStat(IfList):
if len(IfList) == 0:
return True
-
+
for Item in IfList:
if Item[1] == False:
return False
-
+
return True
-
+
## PreprocessConditionalStatement() method
#
# Preprocess conditional statement.
@@ -778,7 +778,7 @@ class FdfParser:
Macro = self.__Token
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
-
+
Value = self.__GetExpression()
self.__SetMacroValue(Macro, Value)
self.__WipeOffArea.append(((DefineLine, DefineOffset), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
@@ -808,7 +808,7 @@ class FdfParser:
CondLabel = self.__Token
Expression = self.__GetExpression()
-
+
if CondLabel == '!if':
ConditionSatisfied = self.__EvaluateConditional(Expression, IfList[-1][0][0] + 1, 'eval')
else:
@@ -819,7 +819,7 @@ class FdfParser:
BranchDetermined = ConditionSatisfied
IfList[-1] = [IfList[-1][0], ConditionSatisfied, BranchDetermined]
if ConditionSatisfied:
- self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
+ self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
elif self.__Token in ('!elseif', '!else'):
ElseStartPos = (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len(self.__Token))
if len(IfList) <= 0:
@@ -891,7 +891,7 @@ class FdfParser:
ScopeMacro = self.__MacroDict[TAB_COMMON, TAB_COMMON, TAB_COMMON]
if ScopeMacro:
MacroDict.update(ScopeMacro)
-
+
# Section macro
ScopeMacro = self.__MacroDict[
self.__CurSection[0],
@@ -923,12 +923,12 @@ class FdfParser:
else:
return ValueExpression(Expression, MacroPcdDict)()
except WrnExpression as Excpt:
- #
+ #
# Catch expression evaluation warning here. We need to report
# the precise number of line and return the evaluation result
#
EdkLogger.warn('Parser', "Suspicious expression: %s" % str(Excpt),
- File=self.FileName, ExtraData=self.__CurrentLine(),
+ File=self.FileName, ExtraData=self.__CurrentLine(),
Line=Line)
return Excpt.result
except Exception as Excpt:
@@ -947,7 +947,7 @@ class FdfParser:
raise Warning(str(Excpt), self.FileName, Line)
else:
if Expression.startswith('$(') and Expression[-1] == ')':
- Expression = Expression[2:-1]
+ Expression = Expression[2:-1]
return Expression in MacroPcdDict
## __IsToken() method
@@ -1432,9 +1432,9 @@ class FdfParser:
self.__UndoToken()
self.__GetSetStatement(None)
continue
-
+
Macro = self.__Token
-
+
if not self.__IsToken("="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
if not self.__GetNextToken() or self.__Token.startswith('['):
@@ -1492,7 +1492,7 @@ class FdfParser:
else:
raise Warning("expected FdName in [FD.] section", self.FileName, self.CurrentLineNumber)
self.CurrentFdName = FdName.upper()
-
+
if self.CurrentFdName in self.Profile.FdDict:
raise Warning("Unexpected the same FD name", self.FileName, self.CurrentLineNumber)
@@ -1578,12 +1578,12 @@ class FdfParser:
if self.__IsKeyword( "BaseAddress"):
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
-
+
if not self.__GetNextHexNumber():
raise Warning("expected Hex base address", self.FileName, self.CurrentLineNumber)
-
+
Obj.BaseAddress = self.__Token
-
+
if self.__IsToken( "|"):
pcdPair = self.__GetNextPcdName()
Obj.BaseAddressPcd = pcdPair
@@ -1595,7 +1595,7 @@ class FdfParser:
if self.__IsKeyword( "Size"):
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
-
+
if not self.__GetNextHexNumber():
raise Warning("expected Hex size", self.FileName, self.CurrentLineNumber)
@@ -1612,13 +1612,13 @@ class FdfParser:
if self.__IsKeyword( "ErasePolarity"):
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
-
+
if not self.__GetNextToken():
raise Warning("expected Erase Polarity", self.FileName, self.CurrentLineNumber)
-
+
if self.__Token != "1" and self.__Token != "0":
raise Warning("expected 1 or 0 Erase Polarity", self.FileName, self.CurrentLineNumber)
-
+
Obj.ErasePolarity = self.__Token
return True
@@ -1666,7 +1666,7 @@ class FdfParser:
IsBlock = False
while self.__GetBlockStatement(Obj):
IsBlock = True
-
+
Item = Obj.BlockSizeList[-1]
if Item[0] is None or Item[1] is None:
raise Warning("expected block statement", self.FileName, self.CurrentLineNumber)
@@ -1835,7 +1835,7 @@ class FdfParser:
# @retval False Not able to find
#
def __GetRegionLayout(self, Fd):
- Offset = self.__CalcRegionExpr()
+ Offset = self.__CalcRegionExpr()
if Offset is None:
return False
@@ -2151,9 +2151,9 @@ class FdfParser:
while True:
self.__GetSetStatements(FvObj)
- if not (self.__GetBlockStatement(FvObj) or self.__GetFvBaseAddress(FvObj) or
- self.__GetFvForceRebase(FvObj) or self.__GetFvAlignment(FvObj) or
- self.__GetFvAttributes(FvObj) or self.__GetFvNameGuid(FvObj) or
+ if not (self.__GetBlockStatement(FvObj) or self.__GetFvBaseAddress(FvObj) or
+ self.__GetFvForceRebase(FvObj) or self.__GetFvAlignment(FvObj) or
+ self.__GetFvAttributes(FvObj) or self.__GetFvNameGuid(FvObj) or
self.__GetFvExtEntryStatement(FvObj) or self.__GetFvNameString(FvObj)):
break
@@ -2198,7 +2198,7 @@ class FdfParser:
raise Warning("Unknown alignment value '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
Obj.FvAlignment = self.__Token
return True
-
+
## __GetFvBaseAddress() method
#
# Get BaseAddress for FV
@@ -2222,8 +2222,8 @@ class FdfParser:
if not BaseAddrValuePattern.match(self.__Token.upper()):
raise Warning("Unknown FV base address value '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
Obj.FvBaseAddress = self.__Token
- return True
-
+ return True
+
## __GetFvForceRebase() method
#
# Get FvForceRebase for FV
@@ -2246,14 +2246,14 @@ class FdfParser:
if self.__Token.upper() not in ["TRUE", "FALSE", "0", "0X0", "0X00", "1", "0X1", "0X01"]:
raise Warning("Unknown FvForceRebase value '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
-
+
if self.__Token.upper() in ["TRUE", "1", "0X1", "0X01"]:
Obj.FvForceRebase = True
elif self.__Token.upper() in ["FALSE", "0", "0X0", "0X00"]:
Obj.FvForceRebase = False
else:
Obj.FvForceRebase = None
-
+
return True
@@ -2288,7 +2288,7 @@ class FdfParser:
FvObj.FvAttributeDict[name] = self.__Token
return IsWordToken
-
+
## __GetFvNameGuid() method
#
# Get FV GUID for FV
@@ -2334,7 +2334,7 @@ class FdfParser:
if not self.__IsKeyword ("TYPE"):
raise Warning("expected 'TYPE'", self.FileName, self.CurrentLineNumber)
-
+
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
@@ -2355,7 +2355,7 @@ class FdfParser:
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
-
+
if not self.__IsToken( "{"):
raise Warning("expected '{'", self.FileName, self.CurrentLineNumber)
@@ -2386,13 +2386,13 @@ class FdfParser:
FvObj.FvExtEntryData.append(DataString)
if self.__Token == 'FILE':
-
+
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
-
+
if not self.__GetNextToken():
raise Warning("expected FV Extension Entry file path At Line ", self.FileName, self.CurrentLineNumber)
-
+
FvObj.FvExtEntryData.append(self.__Token)
if not self.__IsToken( "}"):
@@ -2555,7 +2555,7 @@ class FdfParser:
raise Warning("expected ARCH name", self.FileName, self.CurrentLineNumber)
FfsInfObj.UseArch = self.__Token
-
+
if self.__GetNextToken():
p = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\))')
if p.match(self.__Token) and p.match(self.__Token).span()[1] == len(self.__Token):
@@ -2596,7 +2596,7 @@ class FdfParser:
self.__UndoToken()
self.__UndoToken()
return False
-
+
FfsFileObj = FfsFileStatement.FileStatement()
FfsFileObj.FvFileType = self.__Token
@@ -2613,9 +2613,9 @@ class FdfParser:
if not self.__IsToken( ")"):
raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'
-
+
FfsFileObj.NameGuid = self.__Token
-
+
self.__GetFilePart( FfsFileObj, MacroDict.copy())
if ForCapsule:
@@ -2891,7 +2891,7 @@ class FdfParser:
else:
VerSectionObj.FileName = self.__Token
Obj.SectionList.append(VerSectionObj)
-
+
elif self.__IsKeyword( BINARY_FILE_TYPE_UI):
if AlignValue == 'Auto':
raise Warning("Auto alignment can only be used in PE32 or TE section ", self.FileName, self.CurrentLineNumber)
@@ -3345,7 +3345,7 @@ class FdfParser:
Value = self.__Token.strip()
else:
Value = self.__Token.strip()
- Obj.TokensDict[Name] = Value
+ Obj.TokensDict[Name] = Value
if not self.__GetNextToken():
return False
self.__UndoToken()
@@ -3487,7 +3487,7 @@ class FdfParser:
if not self.__GetNextToken():
raise Warning("expected File name", self.FileName, self.CurrentLineNumber)
-
+
AnyFileName = self.__Token
self.__VerifyFile(AnyFileName)
@@ -3520,7 +3520,7 @@ class FdfParser:
else:
CapsuleObj.CapsuleDataList.append(CapsuleAnyFile)
return True
-
+
## __GetAfileStatement() method
#
# Get Afile for capsule
@@ -3540,14 +3540,14 @@ class FdfParser:
if not self.__GetNextToken():
raise Warning("expected Afile name", self.FileName, self.CurrentLineNumber)
-
+
AfileName = self.__Token
AfileBaseName = os.path.basename(AfileName)
-
+
if os.path.splitext(AfileBaseName)[1] not in [".bin", ".BIN", ".Bin", ".dat", ".DAT", ".Dat", ".data", ".DATA", ".Data"]:
raise Warning('invalid binary file type, should be one of "bin",BINARY_FILE_TYPE_BIN,"Bin","dat","DAT","Dat","data","DATA","Data"', \
self.FileName, self.CurrentLineNumber)
-
+
if not os.path.isabs(AfileName):
AfileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(AfileName)
self.__VerifyFile(AfileName)
@@ -3701,7 +3701,7 @@ class FdfParser:
if not self.__IsToken( ")"):
raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'
-
+
NameGuid = self.__Token
KeepReloc = None
@@ -3963,11 +3963,11 @@ class FdfParser:
elif self.__GetNextToken():
if self.__Token not in ("}", "COMPAT16", BINARY_FILE_TYPE_PE32, BINARY_FILE_TYPE_PIC, BINARY_FILE_TYPE_TE, "FV_IMAGE", "RAW", BINARY_FILE_TYPE_DXE_DEPEX,\
BINARY_FILE_TYPE_UI, "VERSION", BINARY_FILE_TYPE_PEI_DEPEX, BINARY_FILE_TYPE_GUID, BINARY_FILE_TYPE_SMM_DEPEX):
-
+
if self.__Token.startswith('PCD'):
self.__UndoToken()
self.__GetNextWord()
-
+
if self.__Token == 'PCD':
if not self.__IsToken( "("):
raise Warning("expected '('", self.FileName, self.CurrentLineNumber)
@@ -3975,9 +3975,9 @@ class FdfParser:
if not self.__IsToken( ")"):
raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'
-
- EfiSectionObj.FileName = self.__Token
-
+
+ EfiSectionObj.FileName = self.__Token
+
else:
self.__UndoToken()
else:
@@ -4364,7 +4364,7 @@ class FdfParser:
self.SectionParser(S)
self.__UndoToken()
return False
-
+
self.__UndoToken()
if not self.__IsToken("[OptionRom.", True):
raise Warning("Unknown Keyword '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
@@ -4383,7 +4383,7 @@ class FdfParser:
isFile = self.__GetOptRomFileStatement(OptRomObj)
if not isInf and not isFile:
break
-
+
return True
## __GetOptRomInfStatement() method
@@ -4424,9 +4424,9 @@ class FdfParser:
else:
self.Profile.InfDict['ArchTBD'].append(ffsInf.InfFileName)
-
+
self.__GetOptRomOverrides (ffsInf)
-
+
Obj.FfsList.append(ffsInf)
return True
@@ -4488,7 +4488,7 @@ class FdfParser:
EdkLogger.error("FdfParser", FORMAT_INVALID, File=self.FileName, Line=self.CurrentLineNumber)
Obj.OverrideAttribs = Overrides
-
+
## __GetOptRomFileStatement() method
#
# Get FILE statements
@@ -4520,7 +4520,7 @@ class FdfParser:
if FfsFileObj.FileType == 'EFI':
self.__GetOptRomOverrides(FfsFileObj)
-
+
Obj.FfsList.append(FfsFileObj)
return True
@@ -4562,7 +4562,7 @@ class FdfParser:
if hasattr(CapsuleDataObj, 'FvName') and CapsuleDataObj.FvName is not None and CapsuleDataObj.FvName.upper() not in RefFvList:
RefFvList.append (CapsuleDataObj.FvName.upper())
elif hasattr(CapsuleDataObj, 'FdName') and CapsuleDataObj.FdName is not None and CapsuleDataObj.FdName.upper() not in RefFdList:
- RefFdList.append (CapsuleDataObj.FdName.upper())
+ RefFdList.append (CapsuleDataObj.FdName.upper())
elif CapsuleDataObj.Ffs is not None:
if isinstance(CapsuleDataObj.Ffs, FfsFileStatement.FileStatement):
if CapsuleDataObj.Ffs.FvName is not None and CapsuleDataObj.Ffs.FvName.upper() not in RefFvList:
@@ -4657,7 +4657,7 @@ class FdfParser:
RefFvStack = []
RefFvStack.append(FvName)
FdAnalyzedList = []
-
+
Index = 0
while RefFvStack != [] and Index < MaxLength:
Index = Index + 1
@@ -4710,7 +4710,7 @@ class FdfParser:
RefCapStack.append(CapName)
FdAnalyzedList = []
FvAnalyzedList = []
-
+
Index = 0
while RefCapStack != [] and Index < MaxLength:
Index = Index + 1
diff --git a/BaseTools/Source/Python/GenFds/Ffs.py b/BaseTools/Source/Python/GenFds/Ffs.py
index df585f3d81..598b2d0231 100644
--- a/BaseTools/Source/Python/GenFds/Ffs.py
+++ b/BaseTools/Source/Python/GenFds/Ffs.py
@@ -39,7 +39,7 @@ class Ffs(FDClassObject):
SUP_MODULE_MM_STANDALONE : 'EFI_FV_FILETYPE_MM_STANDALONE',
SUP_MODULE_MM_CORE_STANDALONE : 'EFI_FV_FILETYPE_MM_CORE_STANDALONE'
}
-
+
# mapping between section type in FDF and file suffix
SectionSuffix = {
BINARY_FILE_TYPE_PE32 : '.pe32',
@@ -51,14 +51,14 @@ class Ffs(FDClassObject):
'COMPAT16' : '.com16',
'RAW' : '.raw',
'FREEFORM_SUBTYPE_GUID': '.guid',
- 'SUBTYPE_GUID' : '.guid',
+ 'SUBTYPE_GUID' : '.guid',
'FV_IMAGE' : 'fv.sec',
'COMPRESS' : '.com',
'GUIDED' : '.guided',
BINARY_FILE_TYPE_PEI_DEPEX : '.dpx',
BINARY_FILE_TYPE_SMM_DEPEX : '.dpx'
}
-
+
## The constructor
#
# @param self The object pointer
diff --git a/BaseTools/Source/Python/GenFds/FfsFileStatement.py b/BaseTools/Source/Python/GenFds/FfsFileStatement.py
index f5de57d0ac..5f31ac03fc 100644
--- a/BaseTools/Source/Python/GenFds/FfsFileStatement.py
+++ b/BaseTools/Source/Python/GenFds/FfsFileStatement.py
@@ -58,7 +58,7 @@ class FileStatement (FileStatementClassObject) :
# @retval string Generated FFS file name
#
def GenFfs(self, Dict = {}, FvChildAddr=[], FvParentAddr=None, IsMakefile=False, FvName=None):
-
+
if self.NameGuid is not None and self.NameGuid.startswith('PCD('):
PcdValue = GenFdsGlobalVariable.GetPcdValue(self.NameGuid)
if len(PcdValue) == 0:
@@ -71,7 +71,7 @@ class FileStatement (FileStatementClassObject) :
EdkLogger.error("GenFds", GENFDS_ERROR, 'GUID value for %s in wrong format.' \
% (self.NameGuid))
self.NameGuid = RegistryGuidStr
-
+
Str = self.NameGuid
if FvName:
Str += FvName
diff --git a/BaseTools/Source/Python/GenFds/FfsInfStatement.py b/BaseTools/Source/Python/GenFds/FfsInfStatement.py
index ef34dbf007..adb9a95beb 100644
--- a/BaseTools/Source/Python/GenFds/FfsInfStatement.py
+++ b/BaseTools/Source/Python/GenFds/FfsInfStatement.py
@@ -225,7 +225,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
EdkLogger.warn("GenFds", GENFDS_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % (self.InfFileName))
if self.ModuleType == SUP_MODULE_SMM_CORE and int(self.PiSpecVersion, 16) < 0x0001000A:
- EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.InfFileName)
+ EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.InfFileName)
if self.ModuleType == SUP_MODULE_MM_CORE_STANDALONE and int(self.PiSpecVersion, 16) < 0x00010032:
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "MM_CORE_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.InfFileName)
@@ -374,13 +374,13 @@ class FfsInfStatement(FfsInfStatementClassObject):
def PatchEfiFile(self, EfiFile, FileType):
#
# If the module does not have any patches, then return path to input file
- #
+ #
if not self.PatchPcds:
return EfiFile
#
# Only patch file if FileType is PE32 or ModuleType is USER_DEFINED
- #
+ #
if FileType != BINARY_FILE_TYPE_PE32 and self.ModuleType != SUP_MODULE_USER_DEFINED:
return EfiFile
@@ -398,7 +398,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
# If a different file from the same module has already been patched, then generate an error
- #
+ #
if self.PatchedBinFile:
EdkLogger.error("GenFds", GENFDS_ERROR,
'Only one binary file can be patched:\n'
@@ -408,12 +408,12 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
# Copy unpatched file contents to output file location to perform patching
- #
+ #
CopyLongFilePath(EfiFile, Output)
#
# Apply patches to patched output file
- #
+ #
for Pcd, Value in self.PatchPcds:
RetVal, RetStr = PatchBinaryFile(Output, int(Pcd.Offset, 0), Pcd.DatumType, Value, Pcd.MaxDatumSize)
if RetVal:
@@ -421,12 +421,12 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
# Save the path of the patched output file
- #
+ #
self.PatchedBinFile = Output
#
# Return path to patched output file
- #
+ #
return Output
## GenFfs() method
@@ -448,14 +448,14 @@ class FfsInfStatement(FfsInfStatementClassObject):
Arch = self.GetCurrentArch()
SrcFile = mws.join( GenFdsGlobalVariable.WorkSpaceDir, self.InfFileName);
DestFile = os.path.join( self.OutputPath, self.ModuleGuid + '.ffs')
-
+
SrcFileDir = "."
SrcPath = os.path.dirname(SrcFile)
SrcFileName = os.path.basename(SrcFile)
- SrcFileBase, SrcFileExt = os.path.splitext(SrcFileName)
+ SrcFileBase, SrcFileExt = os.path.splitext(SrcFileName)
DestPath = os.path.dirname(DestFile)
DestFileName = os.path.basename(DestFile)
- DestFileBase, DestFileExt = os.path.splitext(DestFileName)
+ DestFileBase, DestFileExt = os.path.splitext(DestFileName)
self.MacroDict = {
# source file
"${src}" : SrcFile,
@@ -473,7 +473,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
}
#
# Allow binary type module not specify override rule in FDF file.
- #
+ #
if len(self.BinFileList) > 0:
if self.Rule is None or self.Rule == "":
self.Rule = "BINARY"
@@ -534,7 +534,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
'$(NAMED_GUID)' : self.ModuleGuid
}
String = GenFdsGlobalVariable.MacroExtend(String, MacroDict)
- String = GenFdsGlobalVariable.MacroExtend(String, self.MacroDict)
+ String = GenFdsGlobalVariable.MacroExtend(String, self.MacroDict)
return String
## __GetRule__() method
@@ -960,14 +960,14 @@ class FfsInfStatement(FfsInfStatementClassObject):
Sect.FvAddr = FvChildAddr
if FvParentAddr is not None and isinstance(Sect, GuidSection):
Sect.FvParentAddr = FvParentAddr
-
+
if Rule.KeyStringList != []:
SectList, Align = Sect.GenSection(self.OutputPath, self.ModuleGuid, SecIndex, Rule.KeyStringList, self, IsMakefile = IsMakefile)
else :
SectList, Align = Sect.GenSection(self.OutputPath, self.ModuleGuid, SecIndex, self.KeyStringList, self, IsMakefile = IsMakefile)
-
+
if not HasGeneratedFlag:
- UniVfrOffsetFileSection = ""
+ UniVfrOffsetFileSection = ""
ModuleFileName = mws.join(GenFdsGlobalVariable.WorkSpaceDir, self.InfFileName)
InfData = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(ModuleFileName), self.CurrentArch]
#
@@ -978,16 +978,16 @@ class FfsInfStatement(FfsInfStatementClassObject):
for SourceFile in InfData.Sources:
if SourceFile.Type.upper() == ".VFR" :
#
- # search the .map file to find the offset of vfr binary in the PE32+/TE file.
+ # search the .map file to find the offset of vfr binary in the PE32+/TE file.
#
VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
if SourceFile.Type.upper() == ".UNI" :
#
- # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
+ # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
#
VfrUniBaseName["UniOffsetName"] = (self.BaseName + "Strings")
-
-
+
+
if len(VfrUniBaseName) > 0:
if IsMakefile:
if InfData.BuildType != 'UEFI_HII':
@@ -1023,7 +1023,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
if UniVfrOffsetFileSection:
SectList.append(UniVfrOffsetFileSection)
HasGeneratedFlag = True
-
+
for SecName in SectList :
SectFiles.append(SecName)
SectAlignments.append(Align)
@@ -1071,12 +1071,12 @@ class FfsInfStatement(FfsInfStatementClassObject):
# @param self The object pointer
# @param VfrUniBaseName A name list contain the UNI/INF object name.
# @retval RetValue A list contain offset of UNI/INF object.
- #
+ #
def __GetBuildOutputMapFileVfrUniInfo(self, VfrUniBaseName):
MapFileName = os.path.join(self.EfiOutputPath, self.BaseName + ".map")
EfiFileName = os.path.join(self.EfiOutputPath, self.BaseName + ".efi")
return GetVariableOffset(MapFileName, EfiFileName, VfrUniBaseName.values())
-
+
## __GenUniVfrOffsetFile() method
#
# Generate the offset file for the module which contain VFR or UNI file.
@@ -1089,7 +1089,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
# Use a instance of StringIO to cache data
fStringIO = BytesIO('')
-
+
for Item in VfrUniOffsetList:
if (Item[0].find("Strings") != -1):
#
@@ -1099,7 +1099,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
- fStringIO.write(''.join(UniGuid))
+ fStringIO.write(''.join(UniGuid))
UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue)
else:
@@ -1110,11 +1110,11 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
- fStringIO.write(''.join(VfrGuid))
- type (Item[1])
+ fStringIO.write(''.join(VfrGuid))
+ type (Item[1])
VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue)
-
+
#
# write data into file.
#
@@ -1122,7 +1122,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
SaveFileOnChange(UniVfrOffsetFileName, fStringIO.getvalue())
except:
EdkLogger.error("GenFds", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %UniVfrOffsetFileName, None)
-
+
fStringIO.close ()
-
+
diff --git a/BaseTools/Source/Python/GenFds/Fv.py b/BaseTools/Source/Python/GenFds/Fv.py
index d980020680..798c20a0f4 100644
--- a/BaseTools/Source/Python/GenFds/Fv.py
+++ b/BaseTools/Source/Python/GenFds/Fv.py
@@ -53,7 +53,7 @@ class FV (FvClassObject):
self.FvForceRebase = None
self.FvRegionInFD = None
self.UsedSizeEnable = False
-
+
## AddToBuffer()
#
# Generate Fv and add it to the Buffer
@@ -72,7 +72,7 @@ class FV (FvClassObject):
if BaseAddress is None and self.UiFvName.upper() + 'fv' in GenFds.ImageBinDict:
return GenFds.ImageBinDict[self.UiFvName.upper() + 'fv']
-
+
#
# Check whether FV in Capsule is in FD flash region.
# If yes, return error. Doesn't support FV in Capsule image is also in FD flash region.
@@ -92,7 +92,7 @@ class FV (FvClassObject):
GenFdsGlobalVariable.InfLogger( "\nGenerating %s FV" %self.UiFvName)
GenFdsGlobalVariable.LargeFileInFvFlags.append(False)
FFSGuid = None
-
+
if self.FvBaseAddress is not None:
BaseAddress = self.FvBaseAddress
if not Flag:
@@ -289,7 +289,7 @@ class FV (FvClassObject):
if not self._GetBlockSize():
#set default block size is 1
self.FvInfFile.writelines("EFI_BLOCK_SIZE = 0x1" + TAB_LINE_BREAK)
-
+
for BlockSize in self.BlockSizeList :
if BlockSize[0] is not None:
self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
@@ -331,7 +331,7 @@ class FV (FvClassObject):
self.FvAlignment.strip() + \
" = TRUE" + \
TAB_LINE_BREAK)
-
+
#
# Generate FV extension header file
#
@@ -387,7 +387,7 @@ class FV (FvClassObject):
TotalSize += (Size + 4)
FvExtFile.seek(0)
Buffer += pack('HH', (Size + 4), int(self.FvExtEntryTypeValue[Index], 16))
- Buffer += FvExtFile.read()
+ Buffer += FvExtFile.read()
FvExtFile.close()
if self.FvExtEntryType[Index] == 'DATA':
ByteList = self.FvExtEntryData[Index].split(',')
@@ -418,7 +418,7 @@ class FV (FvClassObject):
FvExtHeaderFileName + \
TAB_LINE_BREAK)
-
+
#
# Add [Files]
#
diff --git a/BaseTools/Source/Python/GenFds/GenFds.py b/BaseTools/Source/Python/GenFds/GenFds.py
index acd19e5276..b90b50e539 100644
--- a/BaseTools/Source/Python/GenFds/GenFds.py
+++ b/BaseTools/Source/Python/GenFds/GenFds.py
@@ -47,7 +47,7 @@ from struct import unpack
## Version and Copyright
versionNumber = "1.0" + ' ' + gBUILD_VERSION
__version__ = "%prog Version " + versionNumber
-__copyright__ = "Copyright (c) 2007 - 2017, Intel Corporation All rights reserved."
+__copyright__ = "Copyright (c) 2007 - 2018, Intel Corporation All rights reserved."
## Tool entrance method
#
@@ -72,10 +72,10 @@ def main():
if Options.verbose is not None:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
GenFdsGlobalVariable.VerboseMode = True
-
+
if Options.FixedAddress is not None:
GenFdsGlobalVariable.FixedLoadAddress = True
-
+
if Options.quiet is not None:
EdkLogger.SetLevel(EdkLogger.QUIET)
if Options.debug is not None:
@@ -100,7 +100,7 @@ def main():
if Options.GenfdsMultiThread:
GenFdsGlobalVariable.EnableGenfdsMultiThread = True
os.chdir(GenFdsGlobalVariable.WorkSpaceDir)
-
+
# set multiple workspace
PackagesPath = os.getenv("PACKAGES_PATH")
mws.setWs(GenFdsGlobalVariable.WorkSpaceDir, PackagesPath)
@@ -228,7 +228,7 @@ def main():
GlobalData.gDatabasePath = os.path.normpath(os.path.join(ConfDirectoryPath, GlobalData.gDatabasePath))
BuildWorkSpace = WorkspaceDatabase(GlobalData.gDatabasePath)
BuildWorkSpace.InitDatabase()
-
+
#
# Get files real name in workspace dir
#
@@ -244,7 +244,7 @@ def main():
TargetArchList = set(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON, Options.BuildTarget, Options.ToolChain].SupArchList) & set(ArchList)
if len(TargetArchList) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, "Target ARCH %s not in platform supported ARCH %s" % (str(ArchList), str(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON].SupArchList)))
-
+
for Arch in ArchList:
GenFdsGlobalVariable.OutputDirFromDscDict[Arch] = NormPath(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, Options.BuildTarget, Options.ToolChain].OutputDirectory)
GenFdsGlobalVariable.PlatformName = BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, Options.BuildTarget, Options.ToolChain].PlatformName
@@ -551,7 +551,7 @@ class GenFds :
Buffer = BytesIO('')
FvObj.AddToBuffer(Buffer)
Buffer.close()
-
+
if GenFds.OnlyGenerateThisFv is None and GenFds.OnlyGenerateThisFd is None and GenFds.OnlyGenerateThisCap is None:
if GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict != {}:
GenFdsGlobalVariable.VerboseLogger("\n Generate other Capsule images!")
@@ -617,7 +617,7 @@ class GenFds :
# @retval None
#
def DisplayFvSpaceInfo(FdfParser):
-
+
FvSpaceInfoList = []
MaxFvNameLength = 0
for FvName in FdfParser.Profile.FvDict:
@@ -644,10 +644,10 @@ class GenFds :
if NameValue[0].strip() == 'EFI_FV_SPACE_SIZE':
FreeFound = True
Free = NameValue[1].strip()
-
+
if TotalFound and UsedFound and FreeFound:
FvSpaceInfoList.append((FvName, Total, Used, Free))
-
+
GenFdsGlobalVariable.InfLogger('\nFV Space Information')
for FvSpaceInfo in FvSpaceInfoList:
Name = FvSpaceInfo[0]
@@ -675,18 +675,18 @@ class GenFds :
if PcdObj.TokenCName == 'PcdBsBaseAddress':
PcdValue = PcdObj.DefaultValue
break
-
+
if PcdValue == '':
return
-
+
Int64PcdValue = long(PcdValue, 0)
- if Int64PcdValue == 0 or Int64PcdValue < -1:
+ if Int64PcdValue == 0 or Int64PcdValue < -1:
return
-
+
TopAddress = 0
if Int64PcdValue > 0:
TopAddress = Int64PcdValue
-
+
ModuleDict = BuildDb.BuildObject[DscFile, TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].Modules
for Key in ModuleDict:
ModuleObj = BuildDb.BuildObject[Key, TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
diff --git a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
index 6eb1201cee..eeb3ec2197 100644
--- a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
+++ b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
@@ -65,7 +65,7 @@ class GenFdsGlobalVariable:
FdfFileTimeStamp = 0
FixedLoadAddress = False
PlatformName = ''
-
+
BuildRuleFamily = "MSFT"
ToolChainFamily = "MSFT"
__BuildRuleDatabase = None
@@ -75,7 +75,7 @@ class GenFdsGlobalVariable:
CopyList = []
ModuleFile = ''
EnableGenfdsMultiThread = False
-
+
#
# The list whose element are flags to indicate if large FFS or SECTION files exist in FV.
# At the beginning of each generation of FV, false flag is appended to the list,
@@ -90,7 +90,7 @@ class GenFdsGlobalVariable:
LARGE_FILE_SIZE = 0x1000000
SectionHeader = struct.Struct("3B 1B")
-
+
## LoadBuildRule
#
@staticmethod
@@ -117,7 +117,7 @@ class GenFdsGlobalVariable:
and GenFdsGlobalVariable.ToolChainTag in ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY] \
and ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY][GenFdsGlobalVariable.ToolChainTag]:
GenFdsGlobalVariable.BuildRuleFamily = ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY][GenFdsGlobalVariable.ToolChainTag]
-
+
if DataType.TAB_TOD_DEFINES_FAMILY in ToolDefinition \
and GenFdsGlobalVariable.ToolChainTag in ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY] \
and ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY][GenFdsGlobalVariable.ToolChainTag]:
@@ -229,11 +229,11 @@ class GenFdsGlobalVariable:
while Index < len(SourceList):
Source = SourceList[Index]
Index = Index + 1
-
+
if File.IsBinary and File == Source and Inf.Binaries is not None and File in Inf.Binaries:
# Skip all files that are not binary libraries
if not Inf.LibraryClass:
- continue
+ continue
RuleObject = BuildRules[DataType.TAB_DEFAULT_BINARY_FILE]
elif FileType in BuildRules:
RuleObject = BuildRules[FileType]
@@ -244,15 +244,15 @@ class GenFdsGlobalVariable:
if LastTarget:
TargetList.add(str(LastTarget))
break
-
+
FileType = RuleObject.SourceFileType
-
+
# stop at STATIC_LIBRARY for library
if Inf.LibraryClass and FileType == DataType.TAB_STATIC_LIBRARY:
if LastTarget:
TargetList.add(str(LastTarget))
break
-
+
Target = RuleObject.Apply(Source)
if not Target:
if LastTarget:
@@ -261,11 +261,11 @@ class GenFdsGlobalVariable:
elif not Target.Outputs:
# Only do build for target with outputs
TargetList.add(str(Target))
-
+
# to avoid cyclic rule
if FileType in RuleChain:
break
-
+
RuleChain.append(FileType)
SourceList.extend(Target.Outputs)
LastTarget = Target
@@ -645,19 +645,19 @@ class GenFdsGlobalVariable:
@staticmethod
def GenerateOptionRom(Output, EfiInput, BinaryInput, Compress=False, ClassCode=None,
Revision=None, DeviceId=None, VendorId=None, IsMakefile=False):
- InputList = []
+ InputList = []
Cmd = ["EfiRom"]
if len(EfiInput) > 0:
-
+
if Compress:
Cmd.append("-ec")
else:
Cmd.append("-e")
-
+
for EfiFile in EfiInput:
Cmd.append(EfiFile)
InputList.append (EfiFile)
-
+
if len(BinaryInput) > 0:
Cmd.append("-b")
for BinFile in BinaryInput:
@@ -668,7 +668,7 @@ class GenFdsGlobalVariable:
if not GenFdsGlobalVariable.NeedsUpdate(Output, InputList) and not IsMakefile:
return
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, InputList))
-
+
if ClassCode is not None:
Cmd += ("-l", ClassCode)
if Revision is not None:
@@ -811,7 +811,7 @@ class GenFdsGlobalVariable:
EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not FixedAtBuild type." % PcdPattern)
if PcdObj.DatumType != DataType.TAB_VOID:
EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not VOID* datum type." % PcdPattern)
-
+
PcdValue = PcdObj.DefaultValue
return PcdValue
@@ -827,7 +827,7 @@ class GenFdsGlobalVariable:
EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not FixedAtBuild type." % PcdPattern)
if PcdObj.DatumType != DataType.TAB_VOID:
EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not VOID* datum type." % PcdPattern)
-
+
PcdValue = PcdObj.DefaultValue
return PcdValue
diff --git a/BaseTools/Source/Python/GenFds/GuidSection.py b/BaseTools/Source/Python/GenFds/GuidSection.py
index bda185476b..c55fb34f2b 100644
--- a/BaseTools/Source/Python/GenFds/GuidSection.py
+++ b/BaseTools/Source/Python/GenFds/GuidSection.py
@@ -76,7 +76,7 @@ class GuidSection(GuidSectionClassObject) :
FvAddrIsSet = True
else:
FvAddrIsSet = False
-
+
if self.ProcessRequired in ("TRUE", "1"):
if self.FvAddr != []:
#no use FvAddr when the image is processed.
diff --git a/BaseTools/Source/Python/GenFds/OptRomFileStatement.py b/BaseTools/Source/Python/GenFds/OptRomFileStatement.py
index 4ef9b4d0e9..8b6d2a1cb0 100644
--- a/BaseTools/Source/Python/GenFds/OptRomFileStatement.py
+++ b/BaseTools/Source/Python/GenFds/OptRomFileStatement.py
@@ -1,7 +1,7 @@
## @file
# process OptionROM generation from FILE statement
#
-# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -18,7 +18,7 @@
import Common.LongFilePathOs as os
from GenFdsGlobalVariable import GenFdsGlobalVariable
-##
+##
#
#
class OptRomFileStatement:
@@ -40,10 +40,10 @@ class OptRomFileStatement:
# @retval string Generated FFS file name
#
def GenFfs(self, Dict = {}, IsMakefile=False):
-
+
if self.FileName is not None:
self.FileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
-
+
return self.FileName
diff --git a/BaseTools/Source/Python/GenFds/OptRomInfStatement.py b/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
index 79891b3b82..dff8235ef7 100644
--- a/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
+++ b/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
@@ -1,7 +1,7 @@
## @file
# process OptionROM generation from INF statement
#
-# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -26,7 +26,7 @@ from Common.StringUtils import *
from FfsInfStatement import FfsInfStatement
from GenFdsGlobalVariable import GenFdsGlobalVariable
-##
+##
#
#
class OptRomInfStatement (FfsInfStatement):
@@ -45,7 +45,7 @@ class OptRomInfStatement (FfsInfStatement):
# @param self The object pointer
#
def __GetOptRomParams(self):
-
+
if self.OverrideAttribs is None:
self.OverrideAttribs = OptionRom.OverrideAttribs()
@@ -59,21 +59,21 @@ class OptRomInfStatement (FfsInfStatement):
if self.OverrideAttribs.PciVendorId is None:
self.OverrideAttribs.PciVendorId = self.OptRomDefs.get ('PCI_VENDOR_ID')
-
+
if self.OverrideAttribs.PciClassCode is None:
self.OverrideAttribs.PciClassCode = self.OptRomDefs.get ('PCI_CLASS_CODE')
-
+
if self.OverrideAttribs.PciDeviceId is None:
self.OverrideAttribs.PciDeviceId = self.OptRomDefs.get ('PCI_DEVICE_ID')
-
+
if self.OverrideAttribs.PciRevision is None:
self.OverrideAttribs.PciRevision = self.OptRomDefs.get ('PCI_REVISION')
-
-# InfObj = GenFdsGlobalVariable.WorkSpace.BuildObject[self.PathClassObj, self.CurrentArch]
+
+# InfObj = GenFdsGlobalVariable.WorkSpace.BuildObject[self.PathClassObj, self.CurrentArch]
# RecordList = InfObj._RawData[MODEL_META_DATA_HEADER, InfObj._Arch, InfObj._Platform]
# for Record in RecordList:
# Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
-# Name = Record[0]
+# Name = Record[0]
## GenFfs() method
#
# Generate FFS
@@ -147,8 +147,8 @@ class OptRomInfStatement (FfsInfStatement):
OutputFileList.append(GenSecInputFile)
else:
FileList, IsSect = Section.Section.GetFileList(self, '', Sect.FileExtension)
- OutputFileList.extend(FileList)
-
+ OutputFileList.extend(FileList)
+
return OutputFileList
- \ No newline at end of file
+
diff --git a/BaseTools/Source/Python/GenFds/OptionRom.py b/BaseTools/Source/Python/GenFds/OptionRom.py
index 755eb01da7..18f3fbd0d7 100644
--- a/BaseTools/Source/Python/GenFds/OptionRom.py
+++ b/BaseTools/Source/Python/GenFds/OptionRom.py
@@ -1,7 +1,7 @@
## @file
# process OptionROM generation
#
-# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -28,7 +28,7 @@ from Common.BuildToolError import *
T_CHAR_LF = '\n'
-##
+##
#
#
class OPTIONROM (OptionRomClassObject):
@@ -57,7 +57,7 @@ class OPTIONROM (OptionRomClassObject):
# Process Modules in FfsList
for FfsFile in self.FfsList :
-
+
if isinstance(FfsFile, OptRomInfStatement.OptRomInfStatement):
FilePathNameList = FfsFile.GenFfs(IsMakefile=Flag)
if len(FilePathNameList) == 0:
@@ -70,14 +70,14 @@ class OPTIONROM (OptionRomClassObject):
if not os.path.exists(TmpOutputDir) :
os.makedirs(TmpOutputDir)
TmpOutputFile = os.path.join(TmpOutputDir, FileName+'.tmp')
-
- GenFdsGlobalVariable.GenerateOptionRom(TmpOutputFile,
- FilePathNameList,
- [],
- FfsFile.OverrideAttribs.NeedCompress,
- FfsFile.OverrideAttribs.PciClassCode,
- FfsFile.OverrideAttribs.PciRevision,
- FfsFile.OverrideAttribs.PciDeviceId,
+
+ GenFdsGlobalVariable.GenerateOptionRom(TmpOutputFile,
+ FilePathNameList,
+ [],
+ FfsFile.OverrideAttribs.NeedCompress,
+ FfsFile.OverrideAttribs.PciClassCode,
+ FfsFile.OverrideAttribs.PciRevision,
+ FfsFile.OverrideAttribs.PciDeviceId,
FfsFile.OverrideAttribs.PciVendorId,
IsMakefile = Flag)
BinFileList.append(TmpOutputFile)
@@ -89,14 +89,14 @@ class OPTIONROM (OptionRomClassObject):
if not os.path.exists(TmpOutputDir) :
os.makedirs(TmpOutputDir)
TmpOutputFile = os.path.join(TmpOutputDir, FileName+'.tmp')
-
- GenFdsGlobalVariable.GenerateOptionRom(TmpOutputFile,
- [FilePathName],
- [],
- FfsFile.OverrideAttribs.NeedCompress,
- FfsFile.OverrideAttribs.PciClassCode,
- FfsFile.OverrideAttribs.PciRevision,
- FfsFile.OverrideAttribs.PciDeviceId,
+
+ GenFdsGlobalVariable.GenerateOptionRom(TmpOutputFile,
+ [FilePathName],
+ [],
+ FfsFile.OverrideAttribs.NeedCompress,
+ FfsFile.OverrideAttribs.PciClassCode,
+ FfsFile.OverrideAttribs.PciRevision,
+ FfsFile.OverrideAttribs.PciDeviceId,
FfsFile.OverrideAttribs.PciVendorId,
IsMakefile=Flag)
BinFileList.append(TmpOutputFile)
@@ -105,13 +105,13 @@ class OPTIONROM (OptionRomClassObject):
EfiFileList.append(FilePathName)
else:
BinFileList.append(FilePathName)
-
+
#
# Call EfiRom tool
#
OutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.DriverName)
OutputFile = OutputFile + '.rom'
-
+
GenFdsGlobalVariable.GenerateOptionRom(
OutputFile,
EfiFileList,
@@ -121,17 +121,17 @@ class OPTIONROM (OptionRomClassObject):
if not Flag:
GenFdsGlobalVariable.InfLogger( "\nGenerate %s Option ROM Successfully" %self.DriverName)
GenFdsGlobalVariable.SharpCounter = 0
-
+
return OutputFile
class OverrideAttribs:
-
+
## The constructor
#
# @param self The object pointer
#
def __init__(self):
-
+
self.PciVendorId = None
self.PciClassCode = None
self.PciDeviceId = None
diff --git a/BaseTools/Source/Python/GenFds/Region.py b/BaseTools/Source/Python/GenFds/Region.py
index 3b7e30ec85..33e4ac8d3c 100644
--- a/BaseTools/Source/Python/GenFds/Region.py
+++ b/BaseTools/Source/Python/GenFds/Region.py
@@ -311,7 +311,7 @@ class Region(RegionClassObject):
if self.Offset >= End:
Start = End
continue
- # region located in current blocks
+ # region located in current blocks
else:
# region ended within current blocks
if self.Offset + self.Size <= End:
@@ -363,5 +363,5 @@ class Region(RegionClassObject):
else:
Index += 1
-
+
diff --git a/BaseTools/Source/Python/GenFds/Section.py b/BaseTools/Source/Python/GenFds/Section.py
index 5895998158..ca4705a90c 100644
--- a/BaseTools/Source/Python/GenFds/Section.py
+++ b/BaseTools/Source/Python/GenFds/Section.py
@@ -160,7 +160,7 @@ class Section (SectionClassObject):
SuffixMap = FfsInf.GetFinalTargetSuffixMap()
if Suffix in SuffixMap:
FileList.extend(SuffixMap[Suffix])
-
+
#Process the file lists is alphabetical for a same section type
if len (FileList) > 1:
FileList.sort()
diff --git a/BaseTools/Source/Python/GenFds/Vtf.py b/BaseTools/Source/Python/GenFds/Vtf.py
index 18ea37b9af..83abc98f07 100644
--- a/BaseTools/Source/Python/GenFds/Vtf.py
+++ b/BaseTools/Source/Python/GenFds/Vtf.py
@@ -1,7 +1,7 @@
## @file
# process VTF generation
#
-# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -25,7 +25,7 @@ T_CHAR_LF = '\n'
#
#
class Vtf (VtfClassObject):
-
+
## The constructor
#
# @param self The object pointer
@@ -46,7 +46,7 @@ class Vtf (VtfClassObject):
OutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.Vtf')
BaseAddArg = self.GetBaseAddressArg(FdAddressDict)
OutputArg, VtfRawDict = self.GenOutputArg()
-
+
Cmd = (
'GenVtf',
) + OutputArg + (
@@ -55,9 +55,9 @@ class Vtf (VtfClassObject):
GenFdsGlobalVariable.CallExternalTool(Cmd, "GenFv -Vtf Failed!")
GenFdsGlobalVariable.SharpCounter = 0
-
+
return VtfRawDict
-
+
## GenBsfInf() method
#
# Generate inf used to generate VTF
@@ -154,7 +154,7 @@ class Vtf (VtfClassObject):
for component in self.ComponentStatementList :
if component.CompLoc.upper() != 'NONE' and not (component.CompLoc.upper() in FvList):
FvList.append(component.CompLoc.upper())
-
+
return FvList
## GetBaseAddressArg() method
@@ -173,13 +173,13 @@ class Vtf (VtfClassObject):
'-s', '0x%x' % Size,
)
return CmdStr
-
+
## GenOutputArg() method
#
# Get output arguments for GenVtf
#
# @param self The object pointer
- #
+ #
def GenOutputArg(self):
FvVtfDict = {}
OutputFileName = ''
@@ -192,6 +192,6 @@ class Vtf (VtfClassObject):
OutputFileName = os.path.join(GenFdsGlobalVariable.FvDir, OutputFileName)
Arg += ('-o', OutputFileName)
FvVtfDict[FvObj.upper()] = OutputFileName
-
+
return Arg, FvVtfDict
-
+
diff --git a/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py b/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
index 9645e9b08d..1be0a28177 100644
--- a/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
+++ b/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
@@ -1,7 +1,7 @@
## @file
# Generate PCD table for 'Patchable In Module' type PCD with given .map file.
# The Patch PCD table like:
-#
+#
# PCD Name Offset in binary
# ======== ================
#
@@ -40,9 +40,9 @@ __copyright__ = "Copyright (c) 2008 - 2018, Intel Corporation. All rights reserv
symRe = re.compile('^([\da-fA-F]+):([\da-fA-F]+) +([\.\-:\\\\\w\?@\$<>]+) +([\da-fA-F]+)', re.UNICODE)
def parsePcdInfoFromMapFile(mapfilepath, efifilepath):
- """ Parse map file to get binary patch pcd information
+ """ Parse map file to get binary patch pcd information
@param path Map file absolution path
-
+
@return a list which element hold (PcdName, Offset, SectionName)
"""
lines = []
@@ -52,7 +52,7 @@ def parsePcdInfoFromMapFile(mapfilepath, efifilepath):
f.close()
except:
return None
-
+
if len(lines) == 0: return None
firstline = lines[0].strip()
if (firstline.startswith("Archive member included ") and
@@ -111,7 +111,7 @@ def _parseForGCC(lines, efifilepath):
m = pcdPatternGcc.match(lines[index + 1].strip())
if m is not None:
bpcds.append((PcdName, int(m.groups(0)[0], 16), int(sections[-1][1], 16), sections[-1][0]))
-
+
# get section information from efi file
efisecs = PeImageClass(efifilepath).SectionHeaderList
if efisecs is None or len(efisecs) == 0:
@@ -129,11 +129,11 @@ def _parseForGCC(lines, efifilepath):
#assert efisec[0].strip() == pcd[3].strip() and efisec[1] + redirection == pcd[2], "There are some differences between map file and efi file"
pcds.append([pcd[0], efisec[2] + pcd[1] - efisec[1] - redirection, efisec[0]])
return pcds
-
+
def _parseGeneral(lines, efifilepath):
- """ For MSFT, ICC, EBC
+ """ For MSFT, ICC, EBC
@param lines line array for map file
-
+
@return a list which element hold (PcdName, Offset, SectionName)
"""
status = 0 #0 - beginning of file; 1 - PE section definition; 2 - symbol table
@@ -177,7 +177,7 @@ def _parseGeneral(lines, efifilepath):
efisecs = PeImageClass(efifilepath).SectionHeaderList
if efisecs is None or len(efisecs) == 0:
return None
-
+
pcds = []
for pcd in bPcds:
index = 0
@@ -188,7 +188,7 @@ def _parseGeneral(lines, efifilepath):
elif pcd[4] == index:
pcds.append([pcd[0], efisec[2] + pcd[2], efisec[0]])
return pcds
-
+
def generatePcdTable(list, pcdpath):
try:
f = open(pcdpath, 'w')
@@ -196,12 +196,12 @@ def generatePcdTable(list, pcdpath):
pass
f.write('PCD Name Offset Section Name\r\n')
-
+
for pcditem in list:
f.write('%-30s 0x%-08X %-6s\r\n' % (pcditem[0], pcditem[1], pcditem[2]))
f.close()
- #print 'Success to generate Binary Patch PCD table at %s!' % pcdpath
+ #print 'Success to generate Binary Patch PCD table at %s!' % pcdpath
if __name__ == '__main__':
UsageString = "%prog -m <MapFile> -e <EfiFile> -o <OutFile>"
@@ -213,7 +213,7 @@ if __name__ == '__main__':
help='Absolute path of EFI binary file.')
parser.add_option('-o', '--outputfile', action='store', dest='outfile',
help='Absolute path of output file to store the got patchable PCD table.')
-
+
(options, args) = parser.parse_args()
if options.mapfile is None or options.efifile is None:
diff --git a/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py b/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
index cf2fc7c4f7..8e243aea96 100644
--- a/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
+++ b/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
@@ -30,14 +30,14 @@ from Common.DataType import *
# Version and Copyright
__version_number__ = ("0.10" + " " + gBUILD_VERSION)
__version__ = "%prog Version " + __version_number__
-__copyright__ = "Copyright (c) 2010, Intel Corporation. All rights reserved."
+__copyright__ = "Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved."
## PatchBinaryFile method
#
# This method mainly patches the data into binary file.
-#
+#
# @param FileName File path of the binary file
-# @param ValueOffset Offset value
+# @param ValueOffset Offset value
# @param TypeName DataType Name
# @param Value Value String
# @param MaxSize MaxSize value
@@ -173,7 +173,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string array." % (ValueString)
else:
#
- # Patch ascii string
+ # Patch ascii string
#
Index = 0
for ByteString in ValueString[1:-1]:
diff --git a/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py b/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py
index d8048d49a2..a34dac423b 100644
--- a/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py
+++ b/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py
@@ -1,11 +1,11 @@
## @file
-# This tool can be used to generate new RSA 2048 bit private/public key pairs
-# in a PEM file format using OpenSSL command line utilities that are installed
+# This tool can be used to generate new RSA 2048 bit private/public key pairs
+# in a PEM file format using OpenSSL command line utilities that are installed
# on the path specified by the system environment variable OPENSSL_PATH.
-# This tool can also optionally write one or more SHA 256 hashes of 2048 bit
-# public keys to a binary file, write one or more SHA 256 hashes of 2048 bit
-# public keys to a file in a C structure format, and in verbose mode display
-# one or more SHA 256 hashes of 2048 bit public keys in a C structure format
+# This tool can also optionally write one or more SHA 256 hashes of 2048 bit
+# public keys to a binary file, write one or more SHA 256 hashes of 2048 bit
+# public keys to a file in a C structure format, and in verbose mode display
+# one or more SHA 256 hashes of 2048 bit public keys in a C structure format
# on STDOUT.
# This tool has been tested with OpenSSL 1.0.1e 11 Feb 2013
#
@@ -26,7 +26,7 @@ from __future__ import print_function
import os
import sys
-import argparse
+import argparse
import subprocess
from Common.BuildVersion import gBUILD_VERSION
@@ -35,14 +35,14 @@ from Common.BuildVersion import gBUILD_VERSION
#
__prog__ = 'Rsa2048Sha256GenerateKeys'
__version__ = '%s Version %s' % (__prog__, '0.9 ' + gBUILD_VERSION)
-__copyright__ = 'Copyright (c) 2013 - 2014, Intel Corporation. All rights reserved.'
+__copyright__ = 'Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.'
__usage__ = '%s [options]' % (__prog__)
if __name__ == '__main__':
#
# Create command line argument parser object
- #
+ #
parser = argparse.ArgumentParser(prog=__prog__, version=__version__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-o", "--output", dest='OutputFile', type=argparse.FileType('wb'), metavar='filename', nargs='*', help="specify the output private key filename in PEM format")
@@ -55,7 +55,7 @@ if __name__ == '__main__':
#
# Parse command line arguments
- #
+ #
args = parser.parse_args()
#
@@ -75,18 +75,18 @@ if __name__ == '__main__':
#
try:
Process = subprocess.Popen('%s version' % (OpenSslCommand), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
- except:
+ except:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(1)
-
+
Version = Process.communicate()
if Process.returncode != 0:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(Process.returncode)
print(Version[0])
-
+
args.PemFileName = []
-
+
#
# Check for output file argument
#
@@ -106,7 +106,7 @@ if __name__ == '__main__':
if Process.returncode != 0:
print('ERROR: RSA 2048 key generation failed')
sys.exit(Process.returncode)
-
+
#
# Check for input file argument
#
@@ -158,7 +158,7 @@ if __name__ == '__main__':
for Item in PublicKeyHash:
PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (ord(Item))
PublicKeyHashC = PublicKeyHashC[:-2] + '}'
-
+
#
# Write SHA 256 of 2048 bit binary public key to public key hash C structure file
#
@@ -167,7 +167,7 @@ if __name__ == '__main__':
args.PublicKeyHashCFile.close ()
except:
pass
-
+
#
# If verbose is enabled display the public key in C structure format
#
diff --git a/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py b/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py
index 807772daff..3fd7eefd6a 100644
--- a/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py
+++ b/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py
@@ -21,7 +21,7 @@ from __future__ import print_function
import os
import sys
-import argparse
+import argparse
import subprocess
import uuid
import struct
@@ -33,7 +33,7 @@ from Common.BuildVersion import gBUILD_VERSION
#
__prog__ = 'Rsa2048Sha256Sign'
__version__ = '%s Version %s' % (__prog__, '0.9 ' + gBUILD_VERSION)
-__copyright__ = 'Copyright (c) 2013 - 2016, Intel Corporation. All rights reserved.'
+__copyright__ = 'Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.'
__usage__ = '%s -e|-d [options] <input_file>' % (__prog__)
#
@@ -61,7 +61,7 @@ TEST_SIGNING_PRIVATE_KEY_FILENAME = 'TestSigningPrivateKey.pem'
if __name__ == '__main__':
#
# Create command line argument parser object
- #
+ #
parser = argparse.ArgumentParser(prog=__prog__, version=__version__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-e", action="store_true", dest='Encode', help='encode file')
@@ -76,7 +76,7 @@ if __name__ == '__main__':
#
# Parse command line arguments
- #
+ #
args = parser.parse_args()
#
@@ -96,19 +96,19 @@ if __name__ == '__main__':
#
try:
Process = subprocess.Popen('%s version' % (OpenSslCommand), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
- except:
+ except:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(1)
-
+
Version = Process.communicate()
if Process.returncode != 0:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(Process.returncode)
print(Version[0])
-
+
#
# Read input file into a buffer and save input filename
- #
+ #
args.InputFileName = args.InputFile.name
args.InputFileBuffer = args.InputFile.read()
args.InputFile.close()
@@ -174,17 +174,17 @@ if __name__ == '__main__':
if args.MonotonicCountStr:
format = "%dsQ" % len(args.InputFileBuffer)
FullInputFileBuffer = struct.pack(format, args.InputFileBuffer, args.MonotonicCountValue)
- #
+ #
# Sign the input file using the specified private key and capture signature from STDOUT
#
Process = subprocess.Popen('%s dgst -sha256 -sign "%s"' % (OpenSslCommand, args.PrivateKeyFileName), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
Signature = Process.communicate(input=FullInputFileBuffer)[0]
if Process.returncode != 0:
sys.exit(Process.returncode)
-
+
#
# Write output file that contains hash GUID, Public Key, Signature, and Input data
- #
+ #
args.OutputFile = open(args.OutputFileName, 'wb')
args.OutputFile.write(EFI_HASH_ALGORITHM_SHA256_GUID.get_bytes_le())
args.OutputFile.write(PublicKey)
@@ -198,7 +198,7 @@ if __name__ == '__main__':
#
Header = EFI_CERT_BLOCK_RSA_2048_SHA256._make(EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT.unpack_from(args.InputFileBuffer))
args.InputFileBuffer = args.InputFileBuffer[EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT.size:]
-
+
#
# Verify that the Hash Type matches the expected SHA256 type
#
@@ -222,10 +222,10 @@ if __name__ == '__main__':
# Write Signature to output file
#
open(args.OutputFileName, 'wb').write(Header.Signature)
-
+
#
# Verify signature
- #
+ #
Process = subprocess.Popen('%s dgst -sha256 -prverify "%s" -signature %s' % (OpenSslCommand, args.PrivateKeyFileName, args.OutputFileName), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
Process.communicate(input=FullInputFileBuffer)
if Process.returncode != 0:
@@ -234,6 +234,6 @@ if __name__ == '__main__':
sys.exit(Process.returncode)
#
- # Save output file contents from input file
- #
+ # Save output file contents from input file
+ #
open(args.OutputFileName, 'wb').write(args.InputFileBuffer)
diff --git a/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.txt b/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.txt
index b0492e2914..7d97162782 100644
--- a/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.txt
+++ b/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.txt
@@ -1 +1 @@
-{0x91, 0x29, 0xc4, 0xbd, 0xea, 0x6d, 0xda, 0xb3, 0xaa, 0x6f, 0x50, 0x16, 0xfc, 0xdb, 0x4b, 0x7e, 0x3c, 0xd6, 0xdc, 0xa4, 0x7a, 0x0e, 0xdd, 0xe6, 0x15, 0x8c, 0x73, 0x96, 0xa2, 0xd4, 0xa6, 0x4d} \ No newline at end of file
+{0x91, 0x29, 0xc4, 0xbd, 0xea, 0x6d, 0xda, 0xb3, 0xaa, 0x6f, 0x50, 0x16, 0xfc, 0xdb, 0x4b, 0x7e, 0x3c, 0xd6, 0xdc, 0xa4, 0x7a, 0x0e, 0xdd, 0xe6, 0x15, 0x8c, 0x73, 0x96, 0xa2, 0xd4, 0xa6, 0x4d}
diff --git a/BaseTools/Source/Python/Table/Table.py b/BaseTools/Source/Python/Table/Table.py
index c311df91c2..e89b99320d 100644
--- a/BaseTools/Source/Python/Table/Table.py
+++ b/BaseTools/Source/Python/Table/Table.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase a common table
#
-# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -19,7 +19,7 @@ import Common.EdkLogger as EdkLogger
## TableFile
#
# This class defined a common table
-#
+#
# @param object: Inherited from object class
#
# @param Cursor: Cursor of the database
@@ -30,7 +30,7 @@ class Table(object):
self.Cur = Cursor
self.Table = ''
self.ID = 0
-
+
## Create table
#
# Create a table
@@ -46,18 +46,18 @@ class Table(object):
#
def Insert(self, SqlCommand):
self.Exec(SqlCommand)
-
+
## Query table
#
# Query all records of the table
- #
+ #
def Query(self):
EdkLogger.verbose("\nQuery tabel %s started ..." % self.Table)
SqlCommand = """select * from %s""" % self.Table
self.Cur.execute(SqlCommand)
for Rs in self.Cur:
EdkLogger.verbose(str(Rs))
-
+
TotalCount = self.GetCount()
EdkLogger.verbose("*** Total %s records in table %s ***" % (TotalCount, self.Table) )
EdkLogger.verbose("Query tabel %s DONE!" % self.Table)
@@ -70,7 +70,7 @@ class Table(object):
SqlCommand = """drop table IF EXISTS %s""" % self.Table
self.Cur.execute(SqlCommand)
EdkLogger.verbose("Drop tabel %s ... DONE!" % self.Table)
-
+
## Get count
#
# Get a count of all records of the table
@@ -82,12 +82,12 @@ class Table(object):
self.Cur.execute(SqlCommand)
for Item in self.Cur:
return Item[0]
-
+
## Generate ID
#
# Generate an ID if input ID is -1
#
- # @param ID: Input ID
+ # @param ID: Input ID
#
# @retval ID: New generated ID
#
@@ -96,14 +96,14 @@ class Table(object):
self.ID = self.ID + 1
return self.ID
-
+
## Init the ID of the table
#
# Init the ID of the table
#
def InitID(self):
self.ID = self.GetCount()
-
+
## Exec
#
# Exec Sql Command, return result
diff --git a/BaseTools/Source/Python/Table/TableDataModel.py b/BaseTools/Source/Python/Table/TableDataModel.py
index 2c37592fc6..f167e43359 100644
--- a/BaseTools/Source/Python/Table/TableDataModel.py
+++ b/BaseTools/Source/Python/Table/TableDataModel.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for data models
#
-# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -22,7 +22,7 @@ from Common.StringUtils import ConvertToSqlString
## TableDataModel
#
# This class defined a table used for data model
-#
+#
# @param object: Inherited from object class
#
#
@@ -30,7 +30,7 @@ class TableDataModel(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'DataModel'
-
+
## Create table
#
# Create table DataModel
@@ -62,13 +62,13 @@ class TableDataModel(Table):
(Name, Description) = ConvertToSqlString((Name, Description))
SqlCommand = """insert into %s values(%s, %s, '%s', '%s')""" % (self.Table, self.ID, CrossIndex, Name, Description)
Table.Insert(self, SqlCommand)
-
+
return self.ID
-
+
## Init table
#
# Create all default records of table DataModel
- #
+ #
def InitTable(self):
EdkLogger.verbose("\nInitialize table DataModel started ...")
for Item in DataClass.MODEL_LIST:
@@ -77,7 +77,7 @@ class TableDataModel(Table):
Description = Item[0]
self.Insert(CrossIndex, Name, Description)
EdkLogger.verbose("Initialize table DataModel ... DONE!")
-
+
## Get CrossIndex
#
# Get a model's cross index from its name
@@ -91,5 +91,5 @@ class TableDataModel(Table):
self.Cur.execute(SqlCommand)
for Item in self.Cur:
CrossIndex = Item[0]
-
+
return CrossIndex
diff --git a/BaseTools/Source/Python/Table/TableDec.py b/BaseTools/Source/Python/Table/TableDec.py
index 97139c58d8..faa18e309d 100644
--- a/BaseTools/Source/Python/Table/TableDec.py
+++ b/BaseTools/Source/Python/Table/TableDec.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for dec datas
#
-# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -22,7 +22,7 @@ from Common.StringUtils import ConvertToSqlString
## TableDec
#
# This class defined a table used for data model
-#
+#
# @param object: Inherited from object class
#
#
@@ -30,7 +30,7 @@ class TableDec(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Dec'
-
+
## Create table
#
# Create table Dec
@@ -90,14 +90,14 @@ class TableDec(Table):
SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
% (self.Table, self.ID, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
Table.Insert(self, SqlCommand)
-
+
return self.ID
-
+
## Query table
#
- # @param Model: The Model of Record
+ # @param Model: The Model of Record
#
- # @retval: A recordSet of all found records
+ # @retval: A recordSet of all found records
#
def Query(self, Model):
SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
diff --git a/BaseTools/Source/Python/Table/TableDsc.py b/BaseTools/Source/Python/Table/TableDsc.py
index 4ac54933aa..2277489518 100644
--- a/BaseTools/Source/Python/Table/TableDsc.py
+++ b/BaseTools/Source/Python/Table/TableDsc.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for dsc datas
#
-# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -22,7 +22,7 @@ from Common.StringUtils import ConvertToSqlString
## TableDsc
#
# This class defined a table used for data model
-#
+#
# @param object: Inherited from object class
#
#
@@ -30,7 +30,7 @@ class TableDsc(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Dsc'
-
+
## Create table
#
# Create table Dsc
@@ -90,14 +90,14 @@ class TableDsc(Table):
SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
% (self.Table, self.ID, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
Table.Insert(self, SqlCommand)
-
+
return self.ID
-
+
## Query table
#
- # @param Model: The Model of Record
+ # @param Model: The Model of Record
#
- # @retval: A recordSet of all found records
+ # @retval: A recordSet of all found records
#
def Query(self, Model):
SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
diff --git a/BaseTools/Source/Python/Table/TableEotReport.py b/BaseTools/Source/Python/Table/TableEotReport.py
index bccf25ca45..e8291b48d7 100644
--- a/BaseTools/Source/Python/Table/TableEotReport.py
+++ b/BaseTools/Source/Python/Table/TableEotReport.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for ECC reports
#
-# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -24,7 +24,7 @@ import Eot.EotGlobalData as EotGlobalData
## TableReport
#
# This class defined a table used for data model
-#
+#
# @param object: Inherited from object class
#
#
@@ -32,7 +32,7 @@ class TableEotReport(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Report'
-
+
## Create table
#
# Create table report
@@ -68,9 +68,9 @@ class TableEotReport(Table):
% (self.Table, self.ID, ModuleID, ModuleName, ModuleGuid, SourceFileID, SourceFileFullPath, \
ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, Enabled)
Table.Insert(self, SqlCommand)
-
+
def GetMaxID(self):
SqlCommand = """select max(ID) from %s""" % self.Table
self.Cur.execute(SqlCommand)
for Item in self.Cur:
- return Item[0] \ No newline at end of file
+ return Item[0]
diff --git a/BaseTools/Source/Python/Table/TableFdf.py b/BaseTools/Source/Python/Table/TableFdf.py
index eea8e9404d..872afc79ef 100644
--- a/BaseTools/Source/Python/Table/TableFdf.py
+++ b/BaseTools/Source/Python/Table/TableFdf.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for fdf datas
#
-# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -22,7 +22,7 @@ from Common.StringUtils import ConvertToSqlString
## TableFdf
#
# This class defined a table used for data model
-#
+#
# @param object: Inherited from object class
#
#
@@ -30,7 +30,7 @@ class TableFdf(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Fdf'
-
+
## Create table
#
# Create table Fdf
@@ -91,14 +91,14 @@ class TableFdf(Table):
SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
% (self.Table, self.ID, Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
Table.Insert(self, SqlCommand)
-
+
return self.ID
-
+
## Query table
#
- # @param Model: The Model of Record
+ # @param Model: The Model of Record
#
- # @retval: A recordSet of all found records
+ # @retval: A recordSet of all found records
#
def Query(self, Model):
SqlCommand = """select ID, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine from %s
diff --git a/BaseTools/Source/Python/Table/TableFile.py b/BaseTools/Source/Python/Table/TableFile.py
index ac762ea7fc..34a0b47418 100644
--- a/BaseTools/Source/Python/Table/TableFile.py
+++ b/BaseTools/Source/Python/Table/TableFile.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for files
#
-# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -23,14 +23,14 @@ from CommonDataClass.DataClass import FileClass
## TableFile
#
# This class defined a table used for file
-#
+#
# @param object: Inherited from object class
#
class TableFile(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'File'
-
+
## Create table
#
# Create table File
@@ -72,15 +72,15 @@ class TableFile(Table):
SqlCommand = """insert into %s values(%s, '%s', '%s', '%s', '%s', %s, '%s')""" \
% (self.Table, self.ID, Name, ExtName, Path, FullPath, Model, TimeStamp)
Table.Insert(self, SqlCommand)
-
+
return self.ID
## InsertFile
#
# Insert one file to table
#
# @param FileFullPath: The full path of the file
- # @param Model: The model of the file
- #
+ # @param Model: The model of the file
+ #
# @retval FileID: The ID after record is inserted
#
def InsertFile(self, FileFullPath, Model):
@@ -89,7 +89,7 @@ class TableFile(Table):
TimeStamp = os.stat(FileFullPath)[8]
File = FileClass(-1, Name, Ext, Filepath, FileFullPath, Model, '', [], [], [])
return self.Insert(File.Name, File.ExtName, File.Path, File.FullPath, File.Model, TimeStamp)
-
+
## Get ID of a given file
#
# @param FilePath Path of file
diff --git a/BaseTools/Source/Python/Table/TableFunction.py b/BaseTools/Source/Python/Table/TableFunction.py
index 3d40bd61f6..bf301fd262 100644
--- a/BaseTools/Source/Python/Table/TableFunction.py
+++ b/BaseTools/Source/Python/Table/TableFunction.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for functions
#
-# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -21,21 +21,21 @@ from Common.StringUtils import ConvertToSqlString
## TableFunction
#
# This class defined a table used for function
-#
+#
# @param Table: Inherited from Table class
#
class TableFunction(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Function'
-
+
## Create table
#
# Create table Function
#
# @param ID: ID of a Function
# @param Header: Header of a Function
- # @param Modifier: Modifier of a Function
+ # @param Modifier: Modifier of a Function
# @param Name: Name of a Function
# @param ReturnStatement: ReturnStatement of a Funciont
# @param StartLine: StartLine of a Function
@@ -72,7 +72,7 @@ class TableFunction(Table):
#
# @param ID: ID of a Function
# @param Header: Header of a Function
- # @param Modifier: Modifier of a Function
+ # @param Modifier: Modifier of a Function
# @param Name: Name of a Function
# @param ReturnStatement: ReturnStatement of a Funciont
# @param StartLine: StartLine of a Function
diff --git a/BaseTools/Source/Python/Table/TableIdentifier.py b/BaseTools/Source/Python/Table/TableIdentifier.py
index 0ec8b3c162..5ce528b26a 100644
--- a/BaseTools/Source/Python/Table/TableIdentifier.py
+++ b/BaseTools/Source/Python/Table/TableIdentifier.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for Identifiers
#
-# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -21,7 +21,7 @@ from Table import Table
## TableIdentifier
#
# This class defined a table used for Identifier
-#
+#
# @param object: Inherited from object class
#
#
@@ -29,7 +29,7 @@ class TableIdentifier(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Identifier'
-
+
## Create table
#
# Create table Identifier
@@ -87,4 +87,4 @@ class TableIdentifier(Table):
% (self.Table, self.ID, Modifier, Type, Name, Value, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn)
Table.Insert(self, SqlCommand)
-        return self.ID
\ No newline at end of file
+ return self.ID
diff --git a/BaseTools/Source/Python/Table/TableInf.py b/BaseTools/Source/Python/Table/TableInf.py
index 478b77776e..c524256a0c 100644
--- a/BaseTools/Source/Python/Table/TableInf.py
+++ b/BaseTools/Source/Python/Table/TableInf.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for inf datas
#
-# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -22,7 +22,7 @@ from Common.StringUtils import ConvertToSqlString
## TableInf
#
# This class defined a table used for data model
-#
+#
# @param object: Inherited from object class
#
#
@@ -30,7 +30,7 @@ class TableInf(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Inf'
-
+
## Create table
#
# Create table Inf
@@ -96,14 +96,14 @@ class TableInf(Table):
SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
% (self.Table, self.ID, Model, Value1, Value2, Value3, Value4, Value5, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
Table.Insert(self, SqlCommand)
-
+
return self.ID
-
+
## Query table
#
- # @param Model: The Model of Record
+ # @param Model: The Model of Record
#
- # @retval: A recordSet of all found records
+ # @retval: A recordSet of all found records
#
def Query(self, Model):
SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
diff --git a/BaseTools/Source/Python/Table/TablePcd.py b/BaseTools/Source/Python/Table/TablePcd.py
index ca1c0f0c8a..689ae22e89 100644
--- a/BaseTools/Source/Python/Table/TablePcd.py
+++ b/BaseTools/Source/Python/Table/TablePcd.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for pcds
#
-# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -21,7 +21,7 @@ from Common.StringUtils import ConvertToSqlString
## TablePcd
#
# This class defined a table used for pcds
-#
+#
# @param object: Inherited from object class
#
#
@@ -29,7 +29,7 @@ class TablePcd(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Pcd'
-
+
## Create table
#
# Create table Pcd
@@ -87,4 +87,4 @@ class TablePcd(Table):
% (self.Table, self.ID, CName, TokenSpaceGuidCName, Token, DatumType, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn)
Table.Insert(self, SqlCommand)
-        return self.ID
\ No newline at end of file
+ return self.ID
diff --git a/BaseTools/Source/Python/Table/TableReport.py b/BaseTools/Source/Python/Table/TableReport.py
index 9ce1d0aa25..1a039249ff 100644
--- a/BaseTools/Source/Python/Table/TableReport.py
+++ b/BaseTools/Source/Python/Table/TableReport.py
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for ECC reports
#
-# Copyright (c) 2008 - 2015, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -25,7 +25,7 @@ from Common.LongFilePathSupport import OpenLongFilePath as open
## TableReport
#
# This class defined a table used for data model
-#
+#
# @param object: Inherited from object class
#
#
@@ -33,7 +33,7 @@ class TableReport(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Report'
-
+
## Create table
#
# Create table report
@@ -78,7 +78,7 @@ class TableReport(Table):
## Query table
#
- # @retval: A recordSet of all found records
+ # @retval: A recordSet of all found records
#
def Query(self):
SqlCommand = """select ID, ErrorID, OtherMsg, BelongsToTable, BelongsToItem, Corrected from %s
diff --git a/BaseTools/Source/Python/TargetTool/TargetTool.py b/BaseTools/Source/Python/TargetTool/TargetTool.py
index 26d2bb9ebf..8d523a6ab8 100644
--- a/BaseTools/Source/Python/TargetTool/TargetTool.py
+++ b/BaseTools/Source/Python/TargetTool/TargetTool.py
@@ -86,13 +86,13 @@ class TargetTool():
if isinstance(self.TargetTxtDictionary[Key], type([])):
print("%-30s = %s" % (Key, ''.join(elem + ' ' for elem in self.TargetTxtDictionary[Key])))
elif self.TargetTxtDictionary[Key] is None:
- errMsg += " Missing %s configuration information, please use TargetTool to set value!" % Key + os.linesep
+ errMsg += " Missing %s configuration information, please use TargetTool to set value!" % Key + os.linesep
else:
print("%-30s = %s" % (Key, self.TargetTxtDictionary[Key]))
-
+
if errMsg != '':
print(os.linesep + 'Warning:' + os.linesep + errMsg)
-
+
def RWFile(self, CommentCharacter, KeySplitCharacter, Num):
try:
fr = open(self.FileName, 'r')
@@ -111,7 +111,7 @@ class TargetTool():
existKeys.append(Key)
else:
print("Warning: Found duplicate key item in original configuration files!")
-
+
if Num == 0:
Line = "%-30s = \n" % Key
else:
@@ -126,12 +126,12 @@ class TargetTool():
if Line is None:
Line = "%-30s = " % key
fw.write(Line)
-
+
fr.close()
fw.close()
os.remove(self.FileName)
os.rename(os.path.normpath(os.path.join(self.WorkSpace, 'Conf\\targetnew.txt')), self.FileName)
-
+
except:
last_type, last_value, last_tb = sys.exc_info()
traceback.print_exception(last_type, last_value, last_tb)
@@ -143,20 +143,20 @@ def GetConfigureKeyValue(self, Key):
if os.path.exists(dscFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.DSCFILE)
else:
- EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
+ EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
"DSC file %s does not exist!" % self.Opt.DSCFILE, RaiseError=False)
elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.Opt.TOOL_DEFINITION_FILE is not None:
tooldefFullPath = os.path.join(self.WorkSpace, self.Opt.TOOL_DEFINITION_FILE)
if os.path.exists(tooldefFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_DEFINITION_FILE)
else:
- EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
+ EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
"Tooldef file %s does not exist!" % self.Opt.TOOL_DEFINITION_FILE, RaiseError=False)
elif self.Opt.NUM >= 2:
Line = "%-30s = %s\n" % (Key, 'Enable')
elif self.Opt.NUM <= 1:
- Line = "%-30s = %s\n" % (Key, 'Disable')
+ Line = "%-30s = %s\n" % (Key, 'Disable')
elif Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER and self.Opt.NUM is not None:
Line = "%-30s = %s\n" % (Key, str(self.Opt.NUM))
elif Key == TAB_TAT_DEFINES_TARGET and self.Opt.TARGET is not None:
@@ -170,13 +170,13 @@ def GetConfigureKeyValue(self, Key):
if os.path.exists(buildruleFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.BUILD_RULE_FILE)
else:
- EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
+ EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
"Build rule file %s does not exist!" % self.Opt.BUILD_RULE_FILE, RaiseError=False)
return Line
VersionNumber = ("0.01" + " " + gBUILD_VERSION)
__version__ = "%prog Version " + VersionNumber
-__copyright__ = "Copyright (c) 2007 - 2010, Intel Corporation All rights reserved."
+__copyright__ = "Copyright (c) 2007 - 2018, Intel Corporation All rights reserved."
__usage__ = "%prog [options] {args} \
\nArgs: \
\n Clean clean the all default configuration of target.txt. \
@@ -200,7 +200,7 @@ def RangeCheckCallback(option, opt_str, value, parser):
setattr(parser.values, option.dest, value)
else:
parser.error("Option %s only allows one instance in command line!" % option)
-
+
def MyOptionParser():
parser = OptionParser(version=__version__, prog="TargetTool.exe", usage=__usage__, description=__copyright__)
parser.add_option("-a", "--arch", action="append", type="choice", choices=['IA32', 'X64', 'IPF', 'EBC', 'ARM', 'AARCH64', '0'], dest="TARGET_ARCH",
@@ -226,7 +226,7 @@ if __name__ == '__main__':
if os.getenv('WORKSPACE') is None:
print("ERROR: WORKSPACE should be specified or edksetup script should be executed before run TargetTool")
sys.exit(1)
-
+
(opt, args) = MyOptionParser()
if len(args) != 1 or (args[0].lower() != 'print' and args[0].lower() != 'clean' and args[0].lower() != 'set'):
print("The number of args isn't 1 or the value of args is invalid.")
diff --git a/BaseTools/Source/Python/Trim/Trim.py b/BaseTools/Source/Python/Trim/Trim.py
index b46d507b4e..4b3091bec3 100644
--- a/BaseTools/Source/Python/Trim/Trim.py
+++ b/BaseTools/Source/Python/Trim/Trim.py
@@ -31,7 +31,7 @@ from Common.LongFilePathSupport import OpenLongFilePath as open
# Version and Copyright
__version_number__ = ("0.10" + " " + gBUILD_VERSION)
__version__ = "%prog Version " + __version_number__
-__copyright__ = "Copyright (c) 2007-2017, Intel Corporation. All rights reserved."
+__copyright__ = "Copyright (c) 2007-2018, Intel Corporation. All rights reserved."
## Regular expression for matching Line Control directive like "#line xxx"
gLineControlDirective = re.compile('^\s*#(?:line)?\s+([0-9]+)\s+"*([^"]*)"')
@@ -261,7 +261,7 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
#
def TrimPreprocessedVfr(Source, Target):
CreateDirectory(os.path.dirname(Target))
-
+
try:
f = open (Source, 'r')
except:
@@ -338,7 +338,7 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None):
SearchPathList = [LocalSearchPath] + IncludePathList
else:
SearchPathList = IncludePathList
-
+
for IncludePath in SearchPathList:
IncludeFile = os.path.join(IncludePath, Source)
if os.path.isfile(IncludeFile):
@@ -349,7 +349,7 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None):
except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
-
+
# avoid A "include" B and B "include" A
IncludeFile = os.path.abspath(os.path.normpath(IncludeFile))
if IncludeFile in gIncludedAslFile:
@@ -357,7 +357,7 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None):
ExtraData= "%s -> %s" % (" -> ".join(gIncludedAslFile), IncludeFile))
return []
gIncludedAslFile.append(IncludeFile)
-
+
for Line in F:
LocalSearchPath = None
Result = gAslIncludePattern.findall(Line)
@@ -367,7 +367,7 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None):
NewFileContent.append("%s%s" % (Indent, Line))
continue
#
- # We should first search the local directory if current file are using pattern #include "XXX"
+ # We should first search the local directory if current file are using pattern #include "XXX"
#
if Result[0][2] == '"':
LocalSearchPath = os.path.dirname(IncludeFile)
@@ -388,20 +388,20 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None):
#
# @param Source File to be trimmed
# @param Target File to store the trimmed content
-# @param IncludePathFile The file to log the external include path
+# @param IncludePathFile The file to log the external include path
#
def TrimAslFile(Source, Target, IncludePathFile):
CreateDirectory(os.path.dirname(Target))
-
+
SourceDir = os.path.dirname(Source)
if SourceDir == '':
SourceDir = '.'
-
+
#
# Add source directory as the first search directory
#
IncludePathList = [SourceDir]
-
+
#
# If additional include path file is specified, append them all
# to the search directory list.
@@ -672,7 +672,7 @@ def Main():
EdkLogger.SetLevel(CommandOptions.LogLevel)
except FatalError as X:
return 1
-
+
try:
if CommandOptions.FileType == "Vfr":
if CommandOptions.OutputFile is None:
diff --git a/BaseTools/Source/Python/UPT/BuildVersion.py b/BaseTools/Source/Python/UPT/BuildVersion.py
index edd4696753..eca8c1ce35 100644
--- a/BaseTools/Source/Python/UPT/BuildVersion.py
+++ b/BaseTools/Source/Python/UPT/BuildVersion.py
@@ -2,11 +2,11 @@
#
# This file is for build version number auto generation
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
Build version information
'''
-gBUILD_VERSION = ""
+gBUILD_VERSION = "Developer Build based on Revision: Unknown"
diff --git a/BaseTools/Source/Python/UPT/Core/DependencyRules.py b/BaseTools/Source/Python/UPT/Core/DependencyRules.py
index 406a8a7e92..9c3baa1b12 100644
--- a/BaseTools/Source/Python/UPT/Core/DependencyRules.py
+++ b/BaseTools/Source/Python/UPT/Core/DependencyRules.py
@@ -1,11 +1,11 @@
## @file
# This file is for installed package information database operations
#
-# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
#
@@ -40,7 +40,7 @@ DEPEX_CHECK_PACKAGE_NOT_FOUND, DEPEX_CHECK_DP_NOT_FOUND) = (0, 1, 2, 3)
## DependencyRules
#
# This class represents the dependency rule check mechanism
-#
+#
# @param object: Inherited from object class
#
class DependencyRules(object):
@@ -53,7 +53,7 @@ class DependencyRules(object):
# Add package info from the DIST to be installed.
self.PkgsToBeDepend.extend(self.GenToBeInstalledPkgList(ToBeInstalledPkgList))
-
+
def GenToBeInstalledPkgList(self, ToBeInstalledPkgList):
if not ToBeInstalledPkgList:
return []
@@ -81,7 +81,7 @@ class DependencyRules(object):
return True
else:
return False
-
+
## Check whether a module depex satisfied.
#
# @param ModuleObj: A module object
@@ -101,7 +101,7 @@ class DependencyRules(object):
#
Exist = self.CheckPackageExists(Dep.GetGuid(), Dep.GetVersion())
#
- # check whether satisfied by current distribution
+ # check whether satisfied by current distribution
#
if not Exist:
if DpObj is None:
@@ -119,7 +119,7 @@ class DependencyRules(object):
else:
Result = False
break
-
+
if not Result:
Logger.Error("CheckModuleDepex", UNKNOWN_ERROR, \
ST.ERR_DEPENDENCY_NOT_MATCH % (ModuleObj.GetName(), \
@@ -127,7 +127,7 @@ class DependencyRules(object):
Dep.GetGuid(), \
Dep.GetVersion()))
return Result
-
+
## Check whether a package exists in a package list specified by PkgsToBeDepend.
#
# @param Guid: Guid of a package
@@ -154,7 +154,7 @@ class DependencyRules(object):
Logger.Verbose(ST.MSG_CHECK_PACKAGE_FINISH)
return Found
-
+
## Check whether a package depex satisfied.
#
# @param PkgObj: A package object
@@ -171,7 +171,7 @@ class DependencyRules(object):
else:
return False
return True
-
+
## Check whether a DP exists.
#
# @param Guid: Guid of a Distribution
@@ -216,7 +216,7 @@ class DependencyRules(object):
return True, DpObj
- ## Check whether a DP depex satisfied by current workspace
+ ## Check whether a DP depex satisfied by current workspace
# (excluding the original distribution's packages to be replaced) for Replace
#
# @param DpObj: A distribution object
@@ -243,17 +243,17 @@ class DependencyRules(object):
continue
else:
return False
-
+
for ModKey in DpObj.ModuleSurfaceArea.keys():
ModObj = DpObj.ModuleSurfaceArea[ModKey]
if self.CheckModuleDepexSatisfied(ModObj, DpObj):
continue
else:
return False
-
+
return True
-
- ## Check whether a DP could be removed from current workspace.
+
+ ## Check whether a DP could be removed from current workspace.
#
# @param DpGuid: File's guid
# @param DpVersion: File's version
@@ -267,7 +267,7 @@ class DependencyRules(object):
#
# remove modules that included in current DP
# List of item (FilePath)
- DpModuleList = self.IpiDb.GetDpModuleList(DpGuid, DpVersion)
+ DpModuleList = self.IpiDb.GetDpModuleList(DpGuid, DpVersion)
for Module in DpModuleList:
if Module in WsModuleList:
WsModuleList.remove(Module)
@@ -277,7 +277,7 @@ class DependencyRules(object):
#
# get packages in current Dp and find the install path
# List of item (PkgGuid, PkgVersion, InstallPath)
- DpPackageList = self.IpiDb.GetPackageListFromDp(DpGuid, DpVersion)
+ DpPackageList = self.IpiDb.GetPackageListFromDp(DpGuid, DpVersion)
DpPackagePathList = []
WorkSP = GlobalData.gWORKSPACE
for (PkgName, PkgGuid, PkgVersion, DecFile) in self.WsPkgList:
@@ -290,18 +290,18 @@ class DependencyRules(object):
else:
InstallPath = DecPath
DecFileRelaPath = DecFile
-
+
if (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
DpPackagePathList.append(DecFileRelaPath)
DpPackageList.remove((PkgGuid, PkgVersion, InstallPath))
-
+
#
# the left items in DpPackageList are the packages that installed but not found anymore
#
for (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
Logger.Warn("UPT",
ST.WARN_INSTALLED_PACKAGE_NOT_FOUND%(PkgGuid, PkgVersion, InstallPath))
-
+
#
# check modules to see if has dependency on package of current DP
#
@@ -320,7 +320,7 @@ class DependencyRules(object):
# @param NewDpPkgList: a list of package information (Guid, Version) in new Dp
# @retval Replaceable: True if distribution could be replaced, False Else
# @retval DependModuleList: the list of modules that make distribution can not be replaced
- #
+ #
def CheckDpDepexForReplace(self, OrigDpGuid, OrigDpVersion, NewDpPkgList):
Replaceable = True
DependModuleList = []
@@ -328,19 +328,19 @@ class DependencyRules(object):
#
# remove modules that included in current DP
# List of item (FilePath)
- DpModuleList = self.IpiDb.GetDpModuleList(OrigDpGuid, OrigDpVersion)
+ DpModuleList = self.IpiDb.GetDpModuleList(OrigDpGuid, OrigDpVersion)
for Module in DpModuleList:
if Module in WsModuleList:
WsModuleList.remove(Module)
else:
Logger.Warn("UPT\n",
ST.ERR_MODULE_NOT_INSTALLED % Module)
-
+
OtherPkgList = NewDpPkgList
#
# get packages in current Dp and find the install path
# List of item (PkgGuid, PkgVersion, InstallPath)
- DpPackageList = self.IpiDb.GetPackageListFromDp(OrigDpGuid, OrigDpVersion)
+ DpPackageList = self.IpiDb.GetPackageListFromDp(OrigDpGuid, OrigDpVersion)
DpPackagePathList = []
WorkSP = GlobalData.gWORKSPACE
for (PkgName, PkgGuid, PkgVersion, DecFile) in self.WsPkgList:
@@ -353,7 +353,7 @@ class DependencyRules(object):
else:
InstallPath = DecPath
DecFileRelaPath = DecFile
-
+
if (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
DpPackagePathList.append(DecFileRelaPath)
DpPackageList.remove((PkgGuid, PkgVersion, InstallPath))
@@ -366,7 +366,7 @@ class DependencyRules(object):
for (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
Logger.Warn("UPT",
ST.WARN_INSTALLED_PACKAGE_NOT_FOUND%(PkgGuid, PkgVersion, InstallPath))
-
+
#
# check modules to see if it can be satisfied by package not belong to removed DP
#
@@ -376,8 +376,8 @@ class DependencyRules(object):
DependModuleList.append(Module)
return (Replaceable, DependModuleList)
-
-## check whether module depends on packages in DpPackagePathList, return True
+
+## check whether module depends on packages in DpPackagePathList, return True
# if found, False else
#
# @param Path: a module path
@@ -432,7 +432,7 @@ def GetPackagePath(InfPath):
# @param DpPackagePathList: a list of Package Paths
# @param OtherPkgList: a list of Package Information (Guid, Version)
# @retval: False: module depends on package in DpPackagePathList and can not be satisfied by OtherPkgList
-# True: either module doesn't depend on DpPackagePathList or module depends on DpPackagePathList
+# True: either module doesn't depend on DpPackagePathList or module depends on DpPackagePathList
# but can be satisfied by OtherPkgList
#
def VerifyReplaceModuleDep(Path, DpPackagePathList, OtherPkgList):
diff --git a/BaseTools/Source/Python/UPT/Core/DistributionPackageClass.py b/BaseTools/Source/Python/UPT/Core/DistributionPackageClass.py
index 81c67fb510..873e2b4de4 100644
--- a/BaseTools/Source/Python/UPT/Core/DistributionPackageClass.py
+++ b/BaseTools/Source/Python/UPT/Core/DistributionPackageClass.py
@@ -1,11 +1,11 @@
## @file
# This file is used to define a class object to describe a distribution package
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -50,62 +50,62 @@ class DistributionPackageHeaderObject(IdentificationObject, \
self.Date = ''
self.Signature = 'Md5Sum'
self.XmlSpecification = ''
-
+
def GetReadOnly(self):
return self.ReadOnly
-
+
def SetReadOnly(self, ReadOnly):
self.ReadOnly = ReadOnly
-
+
def GetRePackage(self):
return self.RePackage
-
+
def SetRePackage(self, RePackage):
self.RePackage = RePackage
-
+
def GetVendor(self):
return self.Vendor
-
+
def SetDate(self, Date):
self.Date = Date
-
+
def GetDate(self):
return self.Date
-
+
def SetSignature(self, Signature):
self.Signature = Signature
-
+
def GetSignature(self):
return self.Signature
-
+
def SetXmlSpecification(self, XmlSpecification):
self.XmlSpecification = XmlSpecification
-
+
def GetXmlSpecification(self):
return self.XmlSpecification
-
+
## DistributionPackageClass
#
# @param object: DistributionPackageClass
-#
+#
class DistributionPackageClass(object):
def __init__(self):
self.Header = DistributionPackageHeaderObject()
#
# {(Guid, Version, Path) : PackageObj}
#
- self.PackageSurfaceArea = Sdict()
+ self.PackageSurfaceArea = Sdict()
#
# {(Guid, Version, Name, Path) : ModuleObj}
#
- self.ModuleSurfaceArea = Sdict()
+ self.ModuleSurfaceArea = Sdict()
self.Tools = MiscFileObject()
self.MiscellaneousFiles = MiscFileObject()
self.UserExtensions = []
self.FileList = []
-
+
## Get all included packages and modules for a distribution package
- #
+ #
# @param WorkspaceDir: WorkspaceDir
# @param PackageList: A list of all packages
# @param ModuleList: A list of all modules
@@ -132,7 +132,7 @@ class DistributionPackageClass(object):
WsRelPath = os.path.normpath(WsRelPath)
if ModuleList and WsRelPath in ModuleList:
Logger.Error("UPT",
- OPTION_VALUE_INVALID,
+ OPTION_VALUE_INVALID,
ST.ERR_NOT_STANDALONE_MODULE_ERROR%\
(WsRelPath, PackageFile))
Filename = os.path.normpath\
@@ -140,12 +140,12 @@ class DistributionPackageClass(object):
os.path.splitext(Filename)
#
# Call INF parser to generate Inf Object.
- # Actually, this call is not directly call, but wrapped by
+ # Actually, this call is not directly call, but wrapped by
# Inf class in InfPomAlignment.
#
try:
ModuleObj = InfPomAlignment(Filename, WorkspaceDir, PackageObj.GetPackagePath())
-
+
#
# Add module to package
#
@@ -161,7 +161,7 @@ class DistributionPackageClass(object):
ST.WRN_EDK1_INF_FOUND%Filename)
else:
raise
-
+
self.PackageSurfaceArea\
[(PackageObj.GetGuid(), PackageObj.GetVersion(), \
PackageObj.GetCombinePath())] = PackageObj
@@ -176,16 +176,16 @@ class DistributionPackageClass(object):
try:
ModuleObj = InfPomAlignment(ModuleFileFullPath, WorkspaceDir)
- ModuleKey = (ModuleObj.GetGuid(),
- ModuleObj.GetVersion(),
- ModuleObj.GetName(),
+ ModuleKey = (ModuleObj.GetGuid(),
+ ModuleObj.GetVersion(),
+ ModuleObj.GetName(),
ModuleObj.GetCombinePath())
self.ModuleSurfaceArea[ModuleKey] = ModuleObj
except FatalError as ErrCode:
if ErrCode.message == EDK1_INF_ERROR:
Logger.Error("UPT",
EDK1_INF_ERROR,
- ST.WRN_EDK1_INF_FOUND%ModuleFileFullPath,
+ ST.WRN_EDK1_INF_FOUND%ModuleFileFullPath,
ExtraData=ST.ERR_NOT_SUPPORTED_SA_MODULE)
else:
raise
@@ -193,16 +193,16 @@ class DistributionPackageClass(object):
# Recover WorkspaceDir
WorkspaceDir = Root
- ## Get all files included for a distribution package, except tool/misc of
+ ## Get all files included for a distribution package, except tool/misc of
# distribution level
- #
+ #
# @retval DistFileList A list of filepath for NonMetaDataFile, relative to workspace
# @retval MetaDataFileList A list of filepath for MetaDataFile, relative to workspace
#
def GetDistributionFileList(self):
MetaDataFileList = []
SkipModulesUniList = []
-
+
for Guid, Version, Path in self.PackageSurfaceArea:
Package = self.PackageSurfaceArea[Guid, Version, Path]
PackagePath = Package.GetPackagePath()
@@ -221,7 +221,7 @@ class DistributionPackageClass(object):
MiscFileFullPath = os.path.normpath(os.path.join(PackagePath, FileObj.GetURI()))
if MiscFileFullPath not in self.FileList:
self.FileList.append(MiscFileFullPath)
-
+
Module = None
ModuleDict = Package.GetModuleDict()
for Guid, Version, Name, Path in ModuleDict:
@@ -262,12 +262,12 @@ class DistributionPackageClass(object):
for NonMetaDataFile in NonMetaDataFileList:
if NonMetaDataFile not in self.FileList:
self.FileList.append(NonMetaDataFile)
-
+
for SkipModuleUni in SkipModulesUniList:
if SkipModuleUni in self.FileList:
self.FileList.remove(SkipModuleUni)
return self.FileList, MetaDataFileList
-
+
diff --git a/BaseTools/Source/Python/UPT/Core/FileHook.py b/BaseTools/Source/Python/UPT/Core/FileHook.py
index 67e86f4f74..ceae12fb37 100644
--- a/BaseTools/Source/Python/UPT/Core/FileHook.py
+++ b/BaseTools/Source/Python/UPT/Core/FileHook.py
@@ -1,11 +1,11 @@
## @file
# This file hooks file and directory creation and removal
#
-# Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
diff --git a/BaseTools/Source/Python/UPT/Core/IpiDb.py b/BaseTools/Source/Python/UPT/Core/IpiDb.py
index 3bce337481..a781d358c8 100644
--- a/BaseTools/Source/Python/UPT/Core/IpiDb.py
+++ b/BaseTools/Source/Python/UPT/Core/IpiDb.py
@@ -1,11 +1,11 @@
## @file
# This file is for installed package information database operations
#
-# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -33,8 +33,8 @@ import platform as pf
#
# This class represents the installed package information database
# Add/Remove/Get installed distribution package information here.
-#
-#
+#
+#
# @param object: Inherited from object class
# @param DbPath: A string for the path of the database
#
@@ -71,16 +71,16 @@ class IpiDatabase(object):
SqlCommand = """
create table %s (
Dummy TEXT NOT NULL,
- PRIMARY KEY (Dummy)
+ PRIMARY KEY (Dummy)
)""" % self.DummyTable
self.Cur.execute(SqlCommand)
self.Conn.commit()
except sqlite3.OperationalError:
- Logger.Error("UPT",
- UPT_ALREADY_RUNNING_ERROR,
+ Logger.Error("UPT",
+ UPT_ALREADY_RUNNING_ERROR,
ST.ERR_UPT_ALREADY_RUNNING_ERROR
)
-
+
#
# Create new table
#
@@ -89,12 +89,12 @@ class IpiDatabase(object):
DpGuid TEXT NOT NULL,DpVersion TEXT NOT NULL,
InstallTime REAL NOT NULL,
NewPkgFileName TEXT NOT NULL,
- PkgFileName TEXT NOT NULL,
+ PkgFileName TEXT NOT NULL,
RePackage TEXT NOT NULL,
- PRIMARY KEY (DpGuid, DpVersion)
+ PRIMARY KEY (DpGuid, DpVersion)
)""" % self.DpTable
self.Cur.execute(SqlCommand)
-
+
SqlCommand = """
create table IF NOT EXISTS %s (
FilePath TEXT NOT NULL,
@@ -104,7 +104,7 @@ class IpiDatabase(object):
PRIMARY KEY (FilePath)
)""" % self.DpFileListTable
self.Cur.execute(SqlCommand)
-
+
SqlCommand = """
create table IF NOT EXISTS %s (
PackageGuid TEXT NOT NULL,
@@ -116,7 +116,7 @@ class IpiDatabase(object):
PRIMARY KEY (PackageGuid, PackageVersion, InstallPath)
)""" % self.PkgTable
self.Cur.execute(SqlCommand)
-
+
SqlCommand = """
create table IF NOT EXISTS %s (
ModuleGuid TEXT NOT NULL,
@@ -129,7 +129,7 @@ class IpiDatabase(object):
PRIMARY KEY (ModuleGuid, ModuleVersion, ModuleName, InstallPath)
)""" % self.ModInPkgTable
self.Cur.execute(SqlCommand)
-
+
SqlCommand = """
create table IF NOT EXISTS %s (
ModuleGuid TEXT NOT NULL,
@@ -142,7 +142,7 @@ class IpiDatabase(object):
PRIMARY KEY (ModuleGuid, ModuleVersion, ModuleName, InstallPath)
)""" % self.StandaloneModTable
self.Cur.execute(SqlCommand)
-
+
SqlCommand = """
create table IF NOT EXISTS %s (
ModuleGuid TEXT NOT NULL,
@@ -153,9 +153,9 @@ class IpiDatabase(object):
DepexVersion TEXT
)""" % self.ModDepexTable
self.Cur.execute(SqlCommand)
-
+
self.Conn.commit()
-
+
Logger.Verbose(ST.MSG_INIT_IPI_FINISH)
def RollBack(self):
@@ -199,7 +199,7 @@ class IpiDatabase(object):
self._AddDpFilePathList(DpObj.Header.GetGuid(), \
DpObj.Header.GetVersion(), FilePath, \
Md5Sum)
-
+
for ModKey in DpObj.ModuleSurfaceArea.keys():
ModGuid = ModKey[0]
ModVersion = ModKey[1]
@@ -219,17 +219,17 @@ class IpiDatabase(object):
self._AddDpFilePathList(DpObj.Header.GetGuid(), \
DpObj.Header.GetVersion(), \
Path, Md5Sum)
-
+
#
# add tool/misc files
#
for (Path, Md5Sum) in DpObj.FileList:
self._AddDpFilePathList(DpObj.Header.GetGuid(), \
DpObj.Header.GetVersion(), Path, Md5Sum)
-
+
self._AddDp(DpObj.Header.GetGuid(), DpObj.Header.GetVersion(), \
NewDpPkgFileName, DpPkgFileName, RePackage)
-
+
except sqlite3.IntegrityError as DetailMsg:
Logger.Error("UPT",
UPT_DB_UPDATE_ERROR,
@@ -239,17 +239,17 @@ class IpiDatabase(object):
## Add a distribution install information
#
- # @param Guid Guid of the distribution package
- # @param Version Version of the distribution package
+ # @param Guid Guid of the distribution package
+ # @param Version Version of the distribution package
# @param NewDpFileName the saved filename of distribution package file
# @param DistributionFileName the filename of distribution package file
#
def _AddDp(self, Guid, Version, NewDpFileName, DistributionFileName, \
RePackage):
-
+
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
-
+
#
# Add newly installed DP information to DB.
#
@@ -264,10 +264,10 @@ class IpiDatabase(object):
DistributionFileName, str(RePackage).upper())
self.Cur.execute(SqlCommand)
-
+
## Add a file list from DP
#
- # @param DpGuid: A DpGuid
+ # @param DpGuid: A DpGuid
# @param DpVersion: A DpVersion
# @param Path: A Path
# @param Path: A Md5Sum
@@ -284,26 +284,26 @@ class IpiDatabase(object):
(self.DpFileListTable, Path, DpGuid, DpVersion, Md5Sum)
self.Cur.execute(SqlCommand)
-
+
## Add a package install information
#
- # @param Guid: A package guid
+ # @param Guid: A package guid
# @param Version: A package version
- # @param DpGuid: A DpGuid
+ # @param DpGuid: A DpGuid
# @param DpVersion: A DpVersion
# @param Path: A Path
#
def _AddPackage(self, Guid, Version, DpGuid=None, DpVersion=None, Path=''):
-
+
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
-
+
if DpGuid is None or len(DpGuid.strip()) == 0:
DpGuid = 'N/A'
-
+
if DpVersion is None or len(DpVersion.strip()) == 0:
DpVersion = 'N/A'
-
+
#
# Add newly installed package information to DB.
#
@@ -312,10 +312,10 @@ class IpiDatabase(object):
"""insert into %s values('%s', '%s', %s, '%s', '%s', '%s')""" % \
(self.PkgTable, Guid, Version, CurrentTime, DpGuid, DpVersion, Path)
self.Cur.execute(SqlCommand)
-
+
## Add a module that from a package install information
#
- # @param Guid: Module Guid
+ # @param Guid: Module Guid
# @param Version: Module version
# @param Name: Module Name
# @param PkgGuid: Package Guid
@@ -324,21 +324,21 @@ class IpiDatabase(object):
#
def _AddModuleInPackage(self, Guid, Version, Name, PkgGuid=None, \
PkgVersion=None, Path=''):
-
+
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
-
+
if PkgGuid is None or len(PkgGuid.strip()) == 0:
PkgGuid = 'N/A'
-
+
if PkgVersion is None or len(PkgVersion.strip()) == 0:
PkgVersion = 'N/A'
-
+
if os.name == 'posix':
Path = Path.replace('\\', os.sep)
else:
Path = Path.replace('/', os.sep)
-
+
#
# Add module from package information to DB.
#
@@ -348,7 +348,7 @@ class IpiDatabase(object):
(self.ModInPkgTable, Guid, Version, Name, CurrentTime, PkgGuid, PkgVersion, \
Path)
self.Cur.execute(SqlCommand)
-
+
## Add a module that is standalone install information
#
# @param Guid: a module Guid
@@ -360,16 +360,16 @@ class IpiDatabase(object):
#
def _AddStandaloneModule(self, Guid, Version, Name, DpGuid=None, \
DpVersion=None, Path=''):
-
+
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
-
+
if DpGuid is None or len(DpGuid.strip()) == 0:
DpGuid = 'N/A'
-
+
if DpVersion is None or len(DpVersion.strip()) == 0:
DpVersion = 'N/A'
-
+
#
# Add module standalone information to DB.
#
@@ -379,7 +379,7 @@ class IpiDatabase(object):
(self.StandaloneModTable, Guid, Version, Name, CurrentTime, DpGuid, \
DpVersion, Path)
self.Cur.execute(SqlCommand)
-
+
## Add a module depex
#
# @param Guid: a module Guid
@@ -390,49 +390,49 @@ class IpiDatabase(object):
#
def _AddModuleDepex(self, Guid, Version, Name, Path, DepexGuid=None, \
DepexVersion=None):
-
+
if DepexGuid is None or len(DepexGuid.strip()) == 0:
DepexGuid = 'N/A'
-
+
if DepexVersion is None or len(DepexVersion.strip()) == 0:
DepexVersion = 'N/A'
-
+
if os.name == 'posix':
Path = Path.replace('\\', os.sep)
else:
Path = Path.replace('/', os.sep)
-
+
#
# Add module depex information to DB.
#
SqlCommand = """insert into %s values('%s', '%s', '%s', '%s', '%s', '%s')"""\
% (self.ModDepexTable, Guid, Version, Name, Path, DepexGuid, DepexVersion)
self.Cur.execute(SqlCommand)
-
- ## Remove a distribution install information, if no version specified,
+
+ ## Remove a distribution install information, if no version specified,
# remove all DPs with this Guid.
#
- # @param DpGuid: guid of dpex
+ # @param DpGuid: guid of dpex
# @param DpVersion: version of dpex
#
def RemoveDpObj(self, DpGuid, DpVersion):
-
+
PkgList = self.GetPackageListFromDp(DpGuid, DpVersion)
#
# delete from ModDepex the standalone module's dependency
#
SqlCommand = \
- """delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
- (select ModuleGuid from StandaloneModInfo as B where B.DpGuid = '%s'
+ """delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
+ (select ModuleGuid from StandaloneModInfo as B where B.DpGuid = '%s'
and B.DpVersion = '%s')
and ModDepexInfo.ModuleVersion in
- (select ModuleVersion from StandaloneModInfo as B
+ (select ModuleVersion from StandaloneModInfo as B
where B.DpGuid = '%s' and B.DpVersion = '%s')
and ModDepexInfo.ModuleName in
- (select ModuleName from StandaloneModInfo as B
+ (select ModuleName from StandaloneModInfo as B
where B.DpGuid = '%s' and B.DpVersion = '%s')
and ModDepexInfo.InstallPath in
- (select InstallPath from StandaloneModInfo as B
+ (select InstallPath from StandaloneModInfo as B
where B.DpGuid = '%s' and B.DpVersion = '%s') """ % \
(DpGuid, DpVersion, DpGuid, DpVersion, DpGuid, DpVersion, DpGuid, DpVersion)
@@ -443,24 +443,24 @@ class IpiDatabase(object):
for Pkg in PkgList:
SqlCommand = \
- """delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
- (select ModuleGuid from ModInPkgInfo
- where ModInPkgInfo.PackageGuid ='%s' and
+ """delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
+ (select ModuleGuid from ModInPkgInfo
+ where ModInPkgInfo.PackageGuid ='%s' and
ModInPkgInfo.PackageVersion = '%s')
and ModDepexInfo.ModuleVersion in
- (select ModuleVersion from ModInPkgInfo
- where ModInPkgInfo.PackageGuid ='%s' and
+ (select ModuleVersion from ModInPkgInfo
+ where ModInPkgInfo.PackageGuid ='%s' and
ModInPkgInfo.PackageVersion = '%s')
and ModDepexInfo.ModuleName in
- (select ModuleName from ModInPkgInfo
- where ModInPkgInfo.PackageGuid ='%s' and
+ (select ModuleName from ModInPkgInfo
+ where ModInPkgInfo.PackageGuid ='%s' and
ModInPkgInfo.PackageVersion = '%s')
and ModDepexInfo.InstallPath in
- (select InstallPath from ModInPkgInfo where
- ModInPkgInfo.PackageGuid ='%s'
+ (select InstallPath from ModInPkgInfo where
+ ModInPkgInfo.PackageGuid ='%s'
and ModInPkgInfo.PackageVersion = '%s')""" \
% (Pkg[0], Pkg[1], Pkg[0], Pkg[1], Pkg[0], Pkg[1], Pkg[0], Pkg[1])
-
+
self.Cur.execute(SqlCommand)
#
# delete the standalone module
@@ -474,7 +474,7 @@ class IpiDatabase(object):
#
for Pkg in PkgList:
SqlCommand = \
- """delete from %s where %s.PackageGuid ='%s'
+ """delete from %s where %s.PackageGuid ='%s'
and %s.PackageVersion = '%s'""" % \
(self.ModInPkgTable, self.ModInPkgTable, Pkg[0], \
self.ModInPkgTable, Pkg[1])
@@ -493,23 +493,23 @@ class IpiDatabase(object):
"""delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
(self.DpFileListTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
- #
+ #
# delete DP
#
SqlCommand = \
"""delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
(self.DpTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
-
+
#self.Conn.commit()
-
+
## Get a list of distribution install information.
#
- # @param Guid: distribution package guid
- # @param Version: distribution package version
+ # @param Guid: distribution package guid
+ # @param Version: distribution package version
#
def GetDp(self, Guid, Version):
-
+
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
Logger.Verbose(ST.MSG_GET_DP_INSTALL_LIST)
@@ -517,7 +517,7 @@ class IpiDatabase(object):
SqlCommand = """select * from %s where DpGuid ='%s'""" % \
(self.DpTable, DpGuid)
self.Cur.execute(SqlCommand)
-
+
else:
Logger.Verbose(ST.MSG_GET_DP_INSTALL_INFO_START)
(DpGuid, DpVersion) = (Guid, Version)
@@ -533,14 +533,14 @@ class IpiDatabase(object):
InstallTime = DpInfo[2]
PkgFileName = DpInfo[3]
DpList.append((DpGuid, DpVersion, InstallTime, PkgFileName))
-
- Logger.Verbose(ST.MSG_GET_DP_INSTALL_INFO_FINISH)
+
+ Logger.Verbose(ST.MSG_GET_DP_INSTALL_INFO_FINISH)
return DpList
-
+
## Get a list of distribution install dirs
#
- # @param Guid: distribution package guid
- # @param Version: distribution package version
+ # @param Guid: distribution package guid
+ # @param Version: distribution package version
#
def GetDpInstallDirList(self, Guid, Version):
SqlCommand = """select InstallPath from PkgInfo where DpGuid = '%s' and DpVersion = '%s'""" % (Guid, Version)
@@ -562,11 +562,11 @@ class IpiDatabase(object):
## Get a list of distribution install file path information.
#
- # @param Guid: distribution package guid
- # @param Version: distribution package version
+ # @param Guid: distribution package guid
+ # @param Version: distribution package version
#
def GetDpFileList(self, Guid, Version):
-
+
(DpGuid, DpVersion) = (Guid, Version)
SqlCommand = \
"""select * from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
@@ -578,7 +578,7 @@ class IpiDatabase(object):
Path = Result[0]
Md5Sum = Result[3]
PathList.append((os.path.join(self.Workspace, Path), Md5Sum))
-
+
return PathList
## Get files' repackage attribute if present that are installed into current workspace
@@ -588,12 +588,12 @@ class IpiDatabase(object):
def GetRePkgDict(self):
SqlCommand = """select * from %s """ % (self.DpTable)
self.Cur.execute(SqlCommand)
-
+
DpInfoList = []
for Result in self.Cur:
DpInfoList.append(Result)
- FileDict = {}
+ FileDict = {}
for Result in DpInfoList:
DpGuid = Result[0]
DpVersion = Result[1]
@@ -606,12 +606,12 @@ class IpiDatabase(object):
for FileInfo in self.GetDpFileList(DpGuid, DpVersion):
PathInfo = FileInfo[0]
FileDict[PathInfo] = DpGuid, DpVersion, NewDpFileName, RePackage
-
+
return FileDict
-
+
## Get (Guid, Version) from distribution file name information.
#
- # @param DistributionFile: Distribution File
+ # @param DistributionFile: Distribution File
#
def GetDpByName(self, DistributionFile):
SqlCommand = """select * from %s where NewPkgFileName = '%s'""" % \
@@ -622,34 +622,34 @@ class IpiDatabase(object):
DpGuid = Result[0]
DpVersion = Result[1]
NewDpFileName = Result[3]
-
+
return (DpGuid, DpVersion, NewDpFileName)
else:
return (None, None, None)
-
+
## Get a list of package information.
#
- # @param Guid: package guid
+ # @param Guid: package guid
# @param Version: package version
#
def GetPackage(self, Guid, Version, DpGuid='', DpVersion=''):
-
+
if DpVersion == '' or DpGuid == '':
(PackageGuid, PackageVersion) = (Guid, Version)
- SqlCommand = """select * from %s where PackageGuid ='%s'
+ SqlCommand = """select * from %s where PackageGuid ='%s'
and PackageVersion = '%s'""" % (self.PkgTable, PackageGuid, \
PackageVersion)
self.Cur.execute(SqlCommand)
-
+
elif Version is None or len(Version.strip()) == 0:
-
+
SqlCommand = """select * from %s where PackageGuid ='%s'""" % \
(self.PkgTable, Guid)
self.Cur.execute(SqlCommand)
else:
(PackageGuid, PackageVersion) = (Guid, Version)
- SqlCommand = """select * from %s where PackageGuid ='%s' and
+ SqlCommand = """select * from %s where PackageGuid ='%s' and
PackageVersion = '%s'
and DpGuid = '%s' and DpVersion = '%s'""" % \
(self.PkgTable, PackageGuid, PackageVersion, \
@@ -664,10 +664,10 @@ class IpiDatabase(object):
InstallPath = PkgInfo[5]
PkgList.append((PkgGuid, PkgVersion, InstallTime, DpGuid, \
DpVersion, InstallPath))
-
+
return PkgList
-
-
+
+
## Get a list of module in package information.
#
# @param Guid: A module guid
@@ -676,15 +676,15 @@ class IpiDatabase(object):
def GetModInPackage(self, Guid, Version, Name, Path, PkgGuid='', PkgVersion=''):
(ModuleGuid, ModuleVersion, ModuleName, InstallPath) = (Guid, Version, Name, Path)
if PkgVersion == '' or PkgGuid == '':
- SqlCommand = """select * from %s where ModuleGuid ='%s' and
- ModuleVersion = '%s' and InstallPath = '%s'
+ SqlCommand = """select * from %s where ModuleGuid ='%s' and
+ ModuleVersion = '%s' and InstallPath = '%s'
and ModuleName = '%s'""" % (self.ModInPkgTable, ModuleGuid, \
ModuleVersion, InstallPath, ModuleName)
self.Cur.execute(SqlCommand)
else:
- SqlCommand = """select * from %s where ModuleGuid ='%s' and
- ModuleVersion = '%s' and InstallPath = '%s'
- and ModuleName = '%s' and PackageGuid ='%s'
+ SqlCommand = """select * from %s where ModuleGuid ='%s' and
+ ModuleVersion = '%s' and InstallPath = '%s'
+ and ModuleName = '%s' and PackageGuid ='%s'
and PackageVersion = '%s'
""" % (self.ModInPkgTable, ModuleGuid, \
ModuleVersion, InstallPath, ModuleName, PkgGuid, PkgVersion)
@@ -698,26 +698,26 @@ class IpiDatabase(object):
InstallPath = ModInfo[5]
ModList.append((ModGuid, ModVersion, InstallTime, PkgGuid, \
PkgVersion, InstallPath))
-
+
return ModList
-
+
## Get a list of module standalone.
#
- # @param Guid: A module guid
- # @param Version: A module version
+ # @param Guid: A module guid
+ # @param Version: A module version
#
def GetStandaloneModule(self, Guid, Version, Name, Path, DpGuid='', DpVersion=''):
(ModuleGuid, ModuleVersion, ModuleName, InstallPath) = (Guid, Version, Name, Path)
if DpGuid == '':
- SqlCommand = """select * from %s where ModuleGuid ='%s' and
- ModuleVersion = '%s' and InstallPath = '%s'
+ SqlCommand = """select * from %s where ModuleGuid ='%s' and
+ ModuleVersion = '%s' and InstallPath = '%s'
and ModuleName = '%s'""" % (self.StandaloneModTable, ModuleGuid, \
ModuleVersion, InstallPath, ModuleName)
self.Cur.execute(SqlCommand)
-
+
else:
- SqlCommand = """select * from %s where ModuleGuid ='%s' and
- ModuleVersion = '%s' and InstallPath = '%s' and ModuleName = '%s' and DpGuid ='%s' and DpVersion = '%s'
+ SqlCommand = """select * from %s where ModuleGuid ='%s' and
+ ModuleVersion = '%s' and InstallPath = '%s' and ModuleName = '%s' and DpGuid ='%s' and DpVersion = '%s'
""" % (self.StandaloneModTable, ModuleGuid, \
ModuleVersion, ModuleName, InstallPath, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
@@ -730,18 +730,18 @@ class IpiDatabase(object):
InstallPath = ModInfo[5]
ModList.append((ModGuid, ModVersion, InstallTime, DpGuid, \
DpVersion, InstallPath))
-
+
return ModList
-
+
## Get a list of module information that comes from DP.
#
- # @param DpGuid: A Distrabution Guid
- # @param DpVersion: A Distrabution version
+ # @param DpGuid: A Distrabution Guid
+ # @param DpVersion: A Distrabution version
#
def GetSModInsPathListFromDp(self, DpGuid, DpVersion):
PathList = []
- SqlCommand = """select InstallPath from %s where DpGuid ='%s'
+ SqlCommand = """select InstallPath from %s where DpGuid ='%s'
and DpVersion = '%s'
""" % (self.StandaloneModTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
@@ -749,17 +749,17 @@ class IpiDatabase(object):
for Result in self.Cur:
InstallPath = Result[0]
PathList.append(InstallPath)
-
+
return PathList
-
+
## Get a list of package information.
#
- # @param DpGuid: A Distrabution Guid
- # @param DpVersion: A Distrabution version
+ # @param DpGuid: A Distrabution Guid
+ # @param DpVersion: A Distrabution version
#
def GetPackageListFromDp(self, DpGuid, DpVersion):
- SqlCommand = """select * from %s where DpGuid ='%s' and
+ SqlCommand = """select * from %s where DpGuid ='%s' and
DpVersion = '%s' """ % (self.PkgTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
@@ -769,31 +769,31 @@ class IpiDatabase(object):
PkgVersion = PkgInfo[1]
InstallPath = PkgInfo[5]
PkgList.append((PkgGuid, PkgVersion, InstallPath))
-
+
return PkgList
-
+
## Get a list of modules that depends on package information from a DP.
#
- # @param DpGuid: A Distrabution Guid
- # @param DpVersion: A Distrabution version
+ # @param DpGuid: A Distrabution Guid
+ # @param DpVersion: A Distrabution version
#
def GetDpDependentModuleList(self, DpGuid, DpVersion):
-
+
ModList = []
PkgList = self.GetPackageListFromDp(DpGuid, DpVersion)
if len(PkgList) > 0:
return ModList
-
+
for Pkg in PkgList:
#
- # get all in-package modules that depends on current
- # Pkg (Guid match, Version match or NA) but not belong to
+ # get all in-package modules that depends on current
+ # Pkg (Guid match, Version match or NA) but not belong to
# current Pkg
#
- SqlCommand = """select t1.ModuleGuid, t1.ModuleVersion,
- t1.InstallPath from %s as t1, %s as t2 where
- t1.ModuleGuid = t2.ModuleGuid and
- t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s'
+ SqlCommand = """select t1.ModuleGuid, t1.ModuleVersion,
+ t1.InstallPath from %s as t1, %s as t2 where
+ t1.ModuleGuid = t2.ModuleGuid and
+ t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s'
and (t2.DepexVersion = '%s' or t2.DepexVersion = 'N/A') and
t1.PackageGuid != '%s' and t1.PackageVersion != '%s'
""" % (self.ModInPkgTable, \
@@ -807,13 +807,13 @@ class IpiDatabase(object):
ModList.append((ModGuid, ModVersion, InstallPath))
#
- # get all modules from standalone modules that depends on current
+ # get all modules from standalone modules that depends on current
#Pkg (Guid match, Version match or NA) but not in current dp
#
SqlCommand = \
- """select t1.ModuleGuid, t1.ModuleVersion, t1.InstallPath
- from %s as t1, %s as t2 where t1.ModuleGuid = t2.ModuleGuid and
- t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s'
+ """select t1.ModuleGuid, t1.ModuleVersion, t1.InstallPath
+ from %s as t1, %s as t2 where t1.ModuleGuid = t2.ModuleGuid and
+ t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s'
and (t2.DepexVersion = '%s' or t2.DepexVersion = 'N/A') and
t1.DpGuid != '%s' and t1.DpVersion != '%s'
""" % \
@@ -825,75 +825,75 @@ class IpiDatabase(object):
ModVersion = ModInfo[1]
InstallPath = ModInfo[2]
ModList.append((ModGuid, ModVersion, InstallPath))
-
-
+
+
return ModList
## Get Dp's list of modules.
#
- # @param DpGuid: A Distrabution Guid
- # @param DpVersion: A Distrabution version
+ # @param DpGuid: A Distrabution Guid
+ # @param DpVersion: A Distrabution version
#
- def GetDpModuleList(self, DpGuid, DpVersion):
+ def GetDpModuleList(self, DpGuid, DpVersion):
ModList = []
#
# get Dp module list from the DpFileList table
#
- SqlCommand = """select FilePath
+ SqlCommand = """select FilePath
from %s
- where DpGuid = '%s' and DpVersion = '%s' and
+ where DpGuid = '%s' and DpVersion = '%s' and
FilePath like '%%.inf'
""" % (self.DpFileListTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
for ModuleInfo in self.Cur:
FilePath = ModuleInfo[0]
ModList.append(os.path.join(self.Workspace, FilePath))
-
- return ModList
-
+ return ModList
+
+
## Get a module depex
#
- # @param DpGuid: A module Guid
- # @param DpVersion: A module version
+ # @param DpGuid: A module Guid
+ # @param DpVersion: A module version
# @param Path:
#
def GetModuleDepex(self, Guid, Version, Path):
-
+
#
# Get module depex information to DB.
#
- SqlCommand = """select * from %s where ModuleGuid ='%s' and
+ SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath ='%s'
""" % (self.ModDepexTable, Guid, Version, Path)
self.Cur.execute(SqlCommand)
-
+
DepexList = []
for DepInfo in self.Cur:
DepexGuid = DepInfo[3]
DepexVersion = DepInfo[4]
DepexList.append((DepexGuid, DepexVersion))
-
+
return DepexList
-
+
## Inventory the distribution installed to current workspace
#
# Inventory the distribution installed to current workspace
- #
+ #
def InventoryDistInstalled(self):
SqlCommand = """select * from %s """ % (self.DpTable)
self.Cur.execute(SqlCommand)
-
+
DpInfoList = []
for Result in self.Cur:
DpGuid = Result[0]
DpVersion = Result[1]
DpAliasName = Result[3]
- DpFileName = Result[4]
+ DpFileName = Result[4]
DpInfoList.append((DpGuid, DpVersion, DpFileName, DpAliasName))
-
- return DpInfoList
+
+ return DpInfoList
## Close entire database
#
@@ -904,18 +904,18 @@ class IpiDatabase(object):
# drop the dummy table
#
SqlCommand = """
- drop table IF EXISTS %s
+ drop table IF EXISTS %s
""" % self.DummyTable
self.Cur.execute(SqlCommand)
self.Conn.commit()
-
+
self.Cur.close()
self.Conn.close()
## Convert To Sql String
#
# 1. Replace "'" with "''" in each item of StringList
- #
+ #
# @param StringList: A list for strings to be converted
#
def __ConvertToSqlString(self, StringList):
@@ -925,4 +925,4 @@ class IpiDatabase(object):
- 
\ No newline at end of file
+
diff --git a/BaseTools/Source/Python/UPT/Core/PackageFile.py b/BaseTools/Source/Python/UPT/Core/PackageFile.py
index 298d8aa9db..5eeed2aea7 100644
--- a/BaseTools/Source/Python/UPT/Core/PackageFile.py
+++ b/BaseTools/Source/Python/UPT/Core/PackageFile.py
@@ -2,11 +2,11 @@
#
# PackageFile class represents the zip file of a distribution package.
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -52,25 +52,25 @@ class PackageFile:
for Filename in self._ZipFile.namelist():
self._Files[os.path.normpath(Filename)] = Filename
except BaseException as Xstr:
- Logger.Error("PackagingTool", FILE_OPEN_FAILURE,
+ Logger.Error("PackagingTool", FILE_OPEN_FAILURE,
ExtraData="%s (%s)" % (FileName, str(Xstr)))
BadFile = self._ZipFile.testzip()
if BadFile is not None:
- Logger.Error("PackagingTool", FILE_CHECKSUM_FAILURE,
+ Logger.Error("PackagingTool", FILE_CHECKSUM_FAILURE,
ExtraData="[%s] in %s" % (BadFile, FileName))
-
+
def GetZipFile(self):
return self._ZipFile
-
- ## Get file name
+
+ ## Get file name
#
def __str__(self):
return self._FileName
-
+
## Extract the file
- #
- # @param To: the destination file
+ #
+ # @param To: the destination file
#
def Unpack(self, ToDest):
for FileN in self._ZipFile.namelist():
@@ -78,11 +78,11 @@ class PackageFile:
Msg = "%s -> %s" % (FileN, ToFile)
Logger.Info(Msg)
self.Extract(FileN, ToFile)
-
+
## Extract the file
- #
- # @param File: the extracted file
- # @param ToFile: the destination file
+ #
+ # @param File: the extracted file
+ # @param ToFile: the destination file
#
def UnpackFile(self, File, ToFile):
File = File.replace('\\', '/')
@@ -91,13 +91,13 @@ class PackageFile:
Logger.Info(Msg)
self.Extract(File, ToFile)
return ToFile
-
+
return ''
-
+
## Extract the file
- #
- # @param Which: the source path
- # @param ToDest: the destination path
+ #
+ # @param Which: the source path
+ # @param ToDest: the destination path
#
def Extract(self, Which, ToDest):
Which = os.path.normpath(Which)
@@ -107,7 +107,7 @@ class PackageFile:
try:
FileContent = self._ZipFile.read(self._Files[Which])
except BaseException as Xstr:
- Logger.Error("PackagingTool", FILE_DECOMPRESS_FAILURE,
+ Logger.Error("PackagingTool", FILE_DECOMPRESS_FAILURE,
ExtraData="[%s] in %s (%s)" % (Which, \
self._FileName, \
str(Xstr)))
@@ -120,19 +120,19 @@ class PackageFile:
else:
ToFile = __FileHookOpen__(ToDest, 'wb')
except BaseException as Xstr:
- Logger.Error("PackagingTool", FILE_OPEN_FAILURE,
+ Logger.Error("PackagingTool", FILE_OPEN_FAILURE,
ExtraData="%s (%s)" % (ToDest, str(Xstr)))
try:
ToFile.write(FileContent)
ToFile.close()
except BaseException as Xstr:
- Logger.Error("PackagingTool", FILE_WRITE_FAILURE,
+ Logger.Error("PackagingTool", FILE_WRITE_FAILURE,
ExtraData="%s (%s)" % (ToDest, str(Xstr)))
## Remove the file
- #
- # @param Files: the removed files
+ #
+ # @param Files: the removed files
#
def Remove(self, Files):
TmpDir = os.path.join(tempfile.gettempdir(), ".packaging")
@@ -144,7 +144,7 @@ class PackageFile:
for SinF in Files:
SinF = os.path.normpath(SinF)
if SinF not in self._Files:
- Logger.Error("PackagingTool", FILE_NOT_FOUND,
+ Logger.Error("PackagingTool", FILE_NOT_FOUND,
ExtraData="%s is not in %s!" % \
(SinF, self._FileName))
self._Files.pop(SinF)
@@ -159,12 +159,12 @@ class PackageFile:
RemoveDirectory(TmpDir, True)
## Pack the files under Top directory, the directory shown in the zipFile start from BaseDir,
- # BaseDir should be the parent directory of the Top directory, for example,
- # Pack(Workspace\Dir1, Workspace) will pack files under Dir1, and the path in the zipfile will
+ # BaseDir should be the parent directory of the Top directory, for example,
+ # Pack(Workspace\Dir1, Workspace) will pack files under Dir1, and the path in the zipfile will
# start from Workspace
- #
- # @param Top: the top directory
- # @param BaseDir: the base directory
+ #
+ # @param Top: the top directory
+ # @param BaseDir: the base directory
#
def Pack(self, Top, BaseDir):
if not os.path.isdir(Top):
@@ -175,14 +175,14 @@ class PackageFile:
Cwd = os.getcwd()
os.chdir(BaseDir)
RelaDir = Top[Top.upper().find(BaseDir.upper()).\
- join(len(BaseDir).join(1)):]
+ join(len(BaseDir).join(1)):]
for Root, Dirs, Files in os.walk(RelaDir):
if 'CVS' in Dirs:
Dirs.remove('CVS')
if '.svn' in Dirs:
Dirs.remove('.svn')
-
+
for Dir in Dirs:
if Dir.startswith('.'):
Dirs.remove(Dir)
@@ -200,8 +200,8 @@ class PackageFile:
os.chdir(Cwd)
## Pack the file
- #
- # @param Files: the files to pack
+ #
+ # @param Files: the files to pack
#
def PackFiles(self, Files):
for File in Files:
@@ -211,9 +211,9 @@ class PackageFile:
os.chdir(Cwd)
## Pack the file
- #
- # @param File: the files to pack
- # @param ArcName: the Arc Name
+ #
+ # @param File: the files to pack
+ # @param ArcName: the Arc Name
#
def PackFile(self, File, ArcName=None):
try:
@@ -221,7 +221,7 @@ class PackageFile:
# avoid packing same file multiple times
#
if platform.system() != 'Windows':
- File = File.replace('\\', '/')
+ File = File.replace('\\', '/')
ZipedFilesNameList = self._ZipFile.namelist()
for ZipedFile in ZipedFilesNameList:
if File == os.path.normpath(ZipedFile):
@@ -233,9 +233,9 @@ class PackageFile:
ExtraData="%s (%s)" % (File, str(Xstr)))
## Write data to the packed file
- #
- # @param Data: data to write
- # @param ArcName: the Arc Name
+ #
+ # @param Data: data to write
+ # @param ArcName: the Arc Name
#
def PackData(self, Data, ArcName):
try:
@@ -247,7 +247,7 @@ class PackageFile:
ExtraData="%s (%s)" % (ArcName, str(Xstr)))
## Close file
- #
+ #
#
def Close(self):
self._ZipFile.close()
diff --git a/BaseTools/Source/Python/UPT/Core/__init__.py b/BaseTools/Source/Python/UPT/Core/__init__.py
index efe9cd09b7..86c8d267e4 100644
--- a/BaseTools/Source/Python/UPT/Core/__init__.py
+++ b/BaseTools/Source/Python/UPT/Core/__init__.py
@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
Core init file
-'''
\ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py b/BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py
index a1a9d38087..0ba07ccebc 100644
--- a/BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py
+++ b/BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py
@@ -4,9 +4,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -70,7 +70,7 @@ from Library.StringUtils import GetUniFileName
def GenPcd(Package, Content):
#
# generate [Pcd] section
- # <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
+ # <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
#
ValidUsageDict = {}
for Pcd in Package.GetPcdList():
@@ -80,16 +80,16 @@ def GenPcd(Package, Content):
HelpTextList = Pcd.GetHelpTextList()
HelpStr = _GetHelpStr(HelpTextList)
CommentStr = GenGenericCommentF(HelpStr, 2)
-
+
PromptList = Pcd.GetPromptList()
PromptStr = _GetHelpStr(PromptList)
CommentStr += GenGenericCommentF(PromptStr.strip(), 1, True)
-
+
PcdErrList = Pcd.GetPcdErrorsList()
for PcdErr in PcdErrList:
CommentStr += GenPcdErrComment(PcdErr)
Statement = CommentStr
-
+
CName = Pcd.GetCName()
TokenSpaceGuidCName = Pcd.GetTokenSpaceGuidCName()
DefaultValue = Pcd.GetDefaultValue()
@@ -107,7 +107,7 @@ def GenPcd(Package, Content):
ValidUsage = 'PcdsDynamic'
elif ValidUsage == 'PcdEx':
ValidUsage = 'PcdsDynamicEx'
-
+
if ValidUsage in ValidUsageDict:
NewSectionDict = ValidUsageDict[ValidUsage]
else:
@@ -129,17 +129,17 @@ def GenPcd(Package, Content):
NewSectionDict[SortedArch] = \
NewSectionDict[SortedArch] + [Statement]
else:
- NewSectionDict[SortedArch] = [Statement]
-
+ NewSectionDict[SortedArch] = [Statement]
+
for ValidUsage in ValidUsageDict:
Content += GenSection(ValidUsage, ValidUsageDict[ValidUsage], True, True)
-
+
return Content
def GenPcdErrorMsgSection(Package, Content):
if not Package.PcdErrorCommentDict:
return Content
-
+
#
# Generate '# [Error.<TokenSpcCName>]' section
#
@@ -148,14 +148,14 @@ def GenPcdErrorMsgSection(Package, Content):
SectionComment += TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_ERROR_SECTION_COMMENT + END_OF_LINE
SectionComment += TAB_COMMENT_SPLIT + END_OF_LINE
TokenSpcCNameList = []
-
+
#
# Get TokenSpcCName list in PcdErrorCommentDict in Package object
#
for (TokenSpcCName, ErrorNumber) in Package.PcdErrorCommentDict:
if TokenSpcCName not in TokenSpcCNameList:
TokenSpcCNameList.append(TokenSpcCName)
-
+
for TokenSpcCNameItem in TokenSpcCNameList:
SectionName = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_SECTION_START + TAB_PCD_ERROR + \
TAB_SPLIT + TokenSpcCNameItem + TAB_SECTION_END + END_OF_LINE
@@ -168,10 +168,10 @@ def GenPcdErrorMsgSection(Package, Content):
ErrorNumber + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT + \
PcdErrorMsg + END_OF_LINE
Content += SectionItem
-
+
Content += TAB_COMMENT_SPLIT
return Content
-
+
def GenGuidProtocolPpi(Package, Content):
#
# generate [Guids] section
@@ -203,17 +203,17 @@ def GenGuidProtocolPpi(Package, Content):
# generate tail comment
#
if Guid.GetSupModuleList():
- Statement += GenDecTailComment(Guid.GetSupModuleList())
+ Statement += GenDecTailComment(Guid.GetSupModuleList())
ArchList = sorted(Guid.GetSupArchList())
SortedArch = ' '.join(ArchList)
if SortedArch in NewSectionDict:
NewSectionDict[SortedArch] = \
NewSectionDict[SortedArch] + [Statement]
else:
- NewSectionDict[SortedArch] = [Statement]
+ NewSectionDict[SortedArch] = [Statement]
Content += GenSection('Guids', NewSectionDict, True, True)
-
+
#
# generate [Protocols] section
#
@@ -232,9 +232,9 @@ def GenGuidProtocolPpi(Package, Content):
#
HelpTextList = Protocol.GetHelpTextList()
HelpStr = _GetHelpStr(HelpTextList)
- CommentStr = GenGenericCommentF(HelpStr, 2)
+ CommentStr = GenGenericCommentF(HelpStr, 2)
- Statement = CommentStr
+ Statement = CommentStr
CName = Protocol.GetCName()
Value = GuidStringToGuidStructureString(Protocol.GetGuid())
Statement += CName.ljust(LeftOffset) + ' = ' + Value
@@ -250,9 +250,9 @@ def GenGuidProtocolPpi(Package, Content):
NewSectionDict[SortedArch] = \
NewSectionDict[SortedArch] + [Statement]
else:
- NewSectionDict[SortedArch] = [Statement]
+ NewSectionDict[SortedArch] = [Statement]
- Content += GenSection('Protocols', NewSectionDict, True, True)
+ Content += GenSection('Protocols', NewSectionDict, True, True)
#
# generate [Ppis] section
@@ -290,28 +290,28 @@ def GenGuidProtocolPpi(Package, Content):
NewSectionDict[SortedArch] = \
NewSectionDict[SortedArch] + [Statement]
else:
- NewSectionDict[SortedArch] = [Statement]
+ NewSectionDict[SortedArch] = [Statement]
Content += GenSection('Ppis', NewSectionDict, True, True)
-
+
return Content
## Transfer Package Object to Dec files
#
-# Transfer all contents of a standard Package Object to a Dec file
+# Transfer all contents of a standard Package Object to a Dec file
#
-# @param Package: A Package
+# @param Package: A Package
#
def PackageToDec(Package, DistHeader = None):
#
# Init global information for the file
#
ContainerFile = Package.GetFullPath()
-
+
Content = ''
-
+
#
- # Generate file header
+ # Generate file header
#
PackageAbstract = GetLocalValue(Package.GetAbstract())
PackageDescription = GetLocalValue(Package.GetDescription())
@@ -335,7 +335,7 @@ def PackageToDec(Package, DistHeader = None):
if not PackageLicense and DistHeader:
for (Lang, License) in DistHeader.GetLicense():
PackageLicense = License
-
+
#
# Generate header comment section of DEC file
#
@@ -345,7 +345,7 @@ def PackageToDec(Package, DistHeader = None):
PackageLicense).replace('\r\n', '\n')
#
- # Generate Binary header
+ # Generate Binary header
#
for UserExtension in Package.GetUserExtensionList():
if UserExtension.GetUserID() == TAB_BINARY_HEADER_USERID \
@@ -357,12 +357,12 @@ def PackageToDec(Package, DistHeader = None):
for (Lang, Copyright) in UserExtension.GetBinaryCopyright():
PackageBinaryCopyright = Copyright
for (Lang, License) in UserExtension.GetBinaryLicense():
- PackageBinaryLicense = License
+ PackageBinaryLicense = License
if PackageBinaryAbstract and PackageBinaryDescription and \
PackageBinaryCopyright and PackageBinaryLicense:
- Content += GenHeaderCommentSection(PackageBinaryAbstract,
- PackageBinaryDescription,
- PackageBinaryCopyright,
+ Content += GenHeaderCommentSection(PackageBinaryAbstract,
+ PackageBinaryDescription,
+ PackageBinaryCopyright,
PackageBinaryLicense,
True)
@@ -374,23 +374,23 @@ def PackageToDec(Package, DistHeader = None):
GenPackageUNIEncodeFile(Package, FileHeader)
#
- # for each section, maintain a dict, sorted arch will be its key,
+ # for each section, maintain a dict, sorted arch will be its key,
#statement list will be its data
# { 'Arch1 Arch2 Arch3': [statement1, statement2],
- # 'Arch1' : [statement1, statement3]
+ # 'Arch1' : [statement1, statement3]
# }
#
-
+
#
- # generate [Defines] section
+ # generate [Defines] section
#
LeftOffset = 31
NewSectionDict = {TAB_ARCH_COMMON : []}
SpecialItemList = []
-
+
Statement = (u'%s ' % TAB_DEC_DEFINES_DEC_SPECIFICATION).ljust(LeftOffset) + u'= %s' % '0x00010017'
SpecialItemList.append(Statement)
-
+
BaseName = Package.GetBaseName()
if BaseName.startswith('.') or BaseName.startswith('-'):
BaseName = '_' + BaseName
@@ -405,7 +405,7 @@ def PackageToDec(Package, DistHeader = None):
if Package.UNIFlag:
Statement = (u'%s ' % TAB_DEC_DEFINES_PKG_UNI_FILE).ljust(LeftOffset) + u'= %s' % Package.GetBaseName() + '.uni'
- SpecialItemList.append(Statement)
+ SpecialItemList.append(Statement)
for SortedArch in NewSectionDict:
NewSectionDict[SortedArch] = \
@@ -417,7 +417,7 @@ def PackageToDec(Package, DistHeader = None):
#
NewSectionDict = {}
IncludeArchList = Package.GetIncludeArchList()
- if IncludeArchList:
+ if IncludeArchList:
for Path, ArchList in IncludeArchList:
Statement = Path
ArchList.sort()
@@ -465,7 +465,7 @@ def PackageToDec(Package, DistHeader = None):
NewSectionDict[SortedArch] = \
NewSectionDict[SortedArch] + [Statement]
else:
- NewSectionDict[SortedArch] = [Statement]
+ NewSectionDict[SortedArch] = [Statement]
Content += GenSection('LibraryClasses', NewSectionDict, True, True)
@@ -473,7 +473,7 @@ def PackageToDec(Package, DistHeader = None):
# Generate '# [Error.<TokenSpcCName>]' section
#
Content = GenPcdErrorMsgSection(Package, Content)
-
+
Content = GenPcd(Package, Content)
#
@@ -498,7 +498,7 @@ def PackageToDec(Package, DistHeader = None):
NewStatement = ""
for Line in LineList:
NewStatement += " %s\n" % Line
-
+
SectionList = []
SectionName = 'UserExtensions'
UserId = UserExtension.GetUserID()
@@ -533,7 +533,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
BinaryAbstract = []
BinaryDescription = []
#
- # If more than one language code is used for any element that would be present in the PACKAGE_UNI_FILE,
+ # If more than one language code is used for any element that would be present in the PACKAGE_UNI_FILE,
# then the PACKAGE_UNI_FILE must be created.
#
for (Key, Value) in PackageObject.GetAbstract() + PackageObject.GetDescription():
@@ -541,7 +541,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
GenUNIFlag = True
else:
OnlyLANGUAGE_EN_X = False
-
+
for UserExtension in PackageObject.GetUserExtensionList():
if UserExtension.GetUserID() == TAB_BINARY_HEADER_USERID \
and UserExtension.GetIdentifier() == TAB_BINARY_HEADER_IDENTIFIER:
@@ -565,7 +565,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
GenUNIFlag = True
else:
OnlyLANGUAGE_EN_X = False
-
+
for PcdError in Pcd.GetPcdErrorsList():
if PcdError.GetErrorNumber().startswith('0x') or PcdError.GetErrorNumber().startswith('0X'):
for (Key, Value) in PcdError.GetErrorMessageList():
@@ -579,26 +579,26 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
return
else:
PackageObject.UNIFlag = True
-
+
if not os.path.exists(os.path.dirname(PackageObject.GetFullPath())):
os.makedirs(os.path.dirname(PackageObject.GetFullPath()))
-
+
ContainerFile = GetUniFileName(os.path.dirname(PackageObject.GetFullPath()), PackageObject.GetBaseName())
Content = UniFileHeader + '\r\n'
Content += '\r\n'
-
+
Content += FormatUniEntry('#string ' + TAB_DEC_PACKAGE_ABSTRACT, PackageObject.GetAbstract(), ContainerFile) + '\r\n'
-
+
Content += FormatUniEntry('#string ' + TAB_DEC_PACKAGE_DESCRIPTION, PackageObject.GetDescription(), ContainerFile) \
+ '\r\n'
-
+
Content += FormatUniEntry('#string ' + TAB_DEC_BINARY_ABSTRACT, BinaryAbstract, ContainerFile) + '\r\n'
-
+
Content += FormatUniEntry('#string ' + TAB_DEC_BINARY_DESCRIPTION, BinaryDescription, ContainerFile) + '\r\n'
-
+
PromptGenList = []
- HelpTextGenList = []
+ HelpTextGenList = []
for Pcd in PackageObject.GetPcdList():
# Generate Prompt for each Pcd
PcdPromptStrName = '#string ' + 'STR_' + Pcd.GetTokenSpaceGuidCName() + '_' + Pcd.GetCName() + '_PROMPT '
@@ -607,7 +607,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
Lang = TxtObj.GetLang()
PromptStr = TxtObj.GetString()
#
- # Avoid generating the same PROMPT entry more than one time.
+ # Avoid generating the same PROMPT entry more than one time.
#
if (PcdPromptStrName, Lang) not in PromptGenList:
TokenValueList.append((Lang, PromptStr))
@@ -615,7 +615,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
PromptString = FormatUniEntry(PcdPromptStrName, TokenValueList, ContainerFile) + '\r\n'
if PromptString not in Content:
Content += PromptString
-
+
# Generate Help String for each Pcd
PcdHelpStrName = '#string ' + 'STR_' + Pcd.GetTokenSpaceGuidCName() + '_' + Pcd.GetCName() + '_HELP '
TokenValueList = []
@@ -623,7 +623,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
Lang = TxtObj.GetLang()
HelpStr = TxtObj.GetString()
#
- # Avoid generating the same HELP entry more than one time.
+ # Avoid generating the same HELP entry more than one time.
#
if (PcdHelpStrName, Lang) not in HelpTextGenList:
TokenValueList.append((Lang, HelpStr))
@@ -631,7 +631,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
HelpTextString = FormatUniEntry(PcdHelpStrName, TokenValueList, ContainerFile) + '\r\n'
if HelpTextString not in Content:
Content += HelpTextString
-
+
# Generate PcdError for each Pcd if ErrorNo exist.
for PcdError in Pcd.GetPcdErrorsList():
ErrorNo = PcdError.GetErrorNumber()
@@ -642,7 +642,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
PcdErrString = FormatUniEntry(PcdErrStrName, PcdError.GetErrorMessageList(), ContainerFile) + '\r\n'
if PcdErrString not in Content:
Content += PcdErrString
-
+
File = codecs.open(ContainerFile, 'w', Encoding)
File.write(u'\uFEFF' + Content)
File.stream.close()
@@ -652,23 +652,23 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
PackageObject.FileList.append((ContainerFile, Md5Sum))
return ContainerFile
-
+
## GenPcdErrComment
#
# @param PcdErrObject: PcdErrorObject
-#
+#
# @retval CommentStr: Generated comment lines, with prefix "#"
-#
+#
def GenPcdErrComment (PcdErrObject):
- CommentStr = ''
+ CommentStr = ''
ErrorCode = PcdErrObject.GetErrorNumber()
ValidValueRange = PcdErrObject.GetValidValueRange()
if ValidValueRange:
- CommentStr = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_VALIDRANGE + TAB_SPACE_SPLIT
+ CommentStr = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_VALIDRANGE + TAB_SPACE_SPLIT
if ErrorCode:
CommentStr += ErrorCode + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT
CommentStr += ValidValueRange + END_OF_LINE
-
+
ValidValue = PcdErrObject.GetValidValue()
if ValidValue:
ValidValueList = \
@@ -677,13 +677,13 @@ def GenPcdErrComment (PcdErrObject):
if ErrorCode:
CommentStr += ErrorCode + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT
CommentStr += TAB_COMMA_SPLIT.join(ValidValueList) + END_OF_LINE
-
+
Expression = PcdErrObject.GetExpression()
if Expression:
CommentStr = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_EXPRESSION + TAB_SPACE_SPLIT
if ErrorCode:
CommentStr += ErrorCode + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT
CommentStr += Expression + END_OF_LINE
-
+
return CommentStr
diff --git a/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py b/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
index 9457f851f4..b62c705137 100644
--- a/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
+++ b/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
@@ -4,9 +4,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -46,8 +46,8 @@ from Library.StringUtils import GetUniFileName
## Transfer Module Object to Inf files
#
-# Transfer all contents of a standard Module Object to an Inf file
-# @param ModuleObject: A Module Object
+# Transfer all contents of a standard Module Object to an Inf file
+# @param ModuleObject: A Module Object
#
def ModuleToInf(ModuleObject, PackageObject=None, DistHeader=None):
if not GlobalData.gWSPKG_LIST:
@@ -59,9 +59,9 @@ def ModuleToInf(ModuleObject, PackageObject=None, DistHeader=None):
Content = ''
#
- # Generate file header, If any Abstract, Description, Copyright or License XML elements are missing,
- # should 1) use the Abstract, Description, Copyright or License from the PackageSurfaceArea.Header elements
- # that the module belongs to, or 2) if this is a stand-alone module that is not included in a PackageSurfaceArea,
+ # Generate file header, If any Abstract, Description, Copyright or License XML elements are missing,
+ # should 1) use the Abstract, Description, Copyright or License from the PackageSurfaceArea.Header elements
+ # that the module belongs to, or 2) if this is a stand-alone module that is not included in a PackageSurfaceArea,
# use the abstract, description, copyright or license from the DistributionPackage.Header elements.
#
ModuleAbstract = GetLocalValue(ModuleObject.GetAbstract())
@@ -107,15 +107,15 @@ def ModuleToInf(ModuleObject, PackageObject=None, DistHeader=None):
#
# Generate header comment section of INF file
- #
+ #
Content += GenHeaderCommentSection(ModuleAbstract,
ModuleDescription,
ModuleCopyright,
ModuleLicense).replace('\r\n', '\n')
#
- # Generate Binary Header
- #
+ # Generate Binary Header
+ #
for UserExtension in ModuleObject.GetUserExtensionList():
if UserExtension.GetUserID() == DT.TAB_BINARY_HEADER_USERID \
and UserExtension.GetIdentifier() == DT.TAB_BINARY_HEADER_IDENTIFIER:
@@ -152,10 +152,10 @@ def ModuleToInf(ModuleObject, PackageObject=None, DistHeader=None):
else:
GlobalData.gIS_BINARY_INF = False
#
- # for each section, maintain a dict, sorted arch will be its key,
+ # for each section, maintain a dict, sorted arch will be its key,
# statement list will be its data
# { 'Arch1 Arch2 Arch3': [statement1, statement2],
- # 'Arch1' : [statement1, statement3]
+ # 'Arch1' : [statement1, statement3]
# }
#
# Gen section contents
@@ -197,7 +197,7 @@ def GenModuleUNIEncodeFile(ModuleObject, UniFileHeader='', Encoding=DT.TAB_ENCOD
BinaryAbstract = []
BinaryDescription = []
#
- # If more than one language code is used for any element that would be present in the MODULE_UNI_FILE,
+ # If more than one language code is used for any element that would be present in the MODULE_UNI_FILE,
# then the MODULE_UNI_FILE must be created.
#
for (Key, Value) in ModuleObject.GetAbstract() + ModuleObject.GetDescription():
@@ -300,11 +300,11 @@ def GenDefines(ModuleObject):
BaseName = '_' + BaseName
Statement = (u'%s ' % DT.TAB_INF_DEFINES_BASE_NAME).ljust(LeftOffset) + u'= %s' % BaseName
SpecialStatementList.append(Statement)
-
+
# TAB_INF_DEFINES_FILE_GUID
Statement = (u'%s ' % DT.TAB_INF_DEFINES_FILE_GUID).ljust(LeftOffset) + u'= %s' % ModuleObject.GetGuid()
SpecialStatementList.append(Statement)
-
+
# TAB_INF_DEFINES_VERSION_STRING
Statement = (u'%s ' % DT.TAB_INF_DEFINES_VERSION_STRING).ljust(LeftOffset) + u'= %s' % ModuleObject.GetVersion()
SpecialStatementList.append(Statement)
@@ -480,7 +480,7 @@ def GenPackages(ModuleObject):
Path = ''
#
# find package path/name
- #
+ #
for PkgInfo in GlobalData.gWSPKG_LIST:
if Guid == PkgInfo[1]:
if (not Version) or (Version == PkgInfo[2]):
@@ -553,7 +553,7 @@ def GenDepex(ModuleObject):
else:
NewSectionDict[Key] = [Statement]
Content += GenSection('Depex', NewSectionDict, False)
-
+
return Content
## GenUserExtensions
#
@@ -673,7 +673,7 @@ def GenBinaryStatement(Key, Value, SubTypeGuidValue=None):
Statement += '|' + Target
return Statement
## GenGuidSections
-#
+#
# @param GuidObjList: List of GuidObject
# @retVal Content: The generated section contents
#
@@ -736,7 +736,7 @@ def GenGuidSections(GuidObjList):
return Content
## GenProtocolPPiSections
-#
+#
# @param ObjList: List of ProtocolObject or Ppi Object
# @retVal Content: The generated section contents
#
@@ -804,7 +804,7 @@ def GenPcdSections(ModuleObject):
Content = ''
if not GlobalData.gIS_BINARY_INF:
#
- # for each Pcd Itemtype, maintain a dict so the same type will be grouped
+ # for each Pcd Itemtype, maintain a dict so the same type will be grouped
# together
#
ItemTypeDict = {}
@@ -866,7 +866,7 @@ def GenPcdSections(ModuleObject):
if NewSectionDict:
Content += GenSection(ItemType, NewSectionDict)
#
- # For AsBuild INF files
+ # For AsBuild INF files
#
else:
Content += GenAsBuiltPacthPcdSections(ModuleObject)
@@ -905,7 +905,7 @@ def GenAsBuiltPacthPcdSections(ModuleObject):
Statement = HelpString + TokenSpaceName + '.' + PcdCName + ' | ' + PcdValue + ' | ' + \
PcdOffset + DT.TAB_SPACE_SPLIT
#
- # Use binary file's Arch to be Pcd's Arch
+ # Use binary file's Arch to be Pcd's Arch
#
ArchList = []
FileNameObjList = BinaryFile.GetFileNameList()
@@ -954,7 +954,7 @@ def GenAsBuiltPcdExSections(ModuleObject):
Statement = HelpString + TokenSpaceName + DT.TAB_SPLIT + PcdCName + DT.TAB_SPACE_SPLIT
#
- # Use binary file's Arch to be Pcd's Arch
+ # Use binary file's Arch to be Pcd's Arch
#
ArchList = []
FileNameObjList = BinaryFile.GetFileNameList()
@@ -1033,7 +1033,7 @@ def GenSpecialSections(ObjectList, SectionName, UserExtensionsContent=''):
Content = Content.lstrip()
#
# add a return to differentiate it between other possible sections
- #
+ #
if Content:
Content += '\n'
return Content
@@ -1110,7 +1110,7 @@ def GenBinaries(ModuleObject):
else:
NewSectionDict[SortedArch] = [Statement]
#
- # as we already generated statement for this DictKey here set the Valuelist to be empty
+ # as we already generated statement for this DictKey here set the Valuelist to be empty
# to avoid generate duplicate entries as the DictKey may have multiple entries
#
BinariesDict[Key] = []
diff --git a/BaseTools/Source/Python/UPT/GenMetaFile/GenMetaFileMisc.py b/BaseTools/Source/Python/UPT/GenMetaFile/GenMetaFileMisc.py
index ae8dc85e3e..da3972ccc3 100644
--- a/BaseTools/Source/Python/UPT/GenMetaFile/GenMetaFileMisc.py
+++ b/BaseTools/Source/Python/UPT/GenMetaFile/GenMetaFileMisc.py
@@ -2,11 +2,11 @@
#
# This file contained the miscellaneous routines for GenMetaFile usage.
#
-# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -26,7 +26,7 @@ from Parser.DecParser import Dec
# @param SectionDict: string of source file path/name
# @param Arch: string of source file family field
# @param ExternList: string of source file FeatureFlag field
-#
+#
def AddExternToDefineSec(SectionDict, Arch, ExternList):
LeftOffset = 31
for ArchList, EntryPoint, UnloadImage, Constructor, Destructor, FFE, HelpStringList in ExternList:
@@ -93,7 +93,7 @@ def ObtainPcdName(Packages, TokenSpaceGuidValue, Token):
Path = None
#
# find package path/name
- #
+ #
for PkgInfo in GlobalData.gWSPKG_LIST:
if Guid == PkgInfo[1]:
if (not Version) or (Version == PkgInfo[2]):
@@ -156,9 +156,9 @@ def ObtainPcdName(Packages, TokenSpaceGuidValue, Token):
return TokenSpaceGuidName, PcdCName
## _TransferDict
-# transfer dict that using (Statement, SortedArch) as key,
+# transfer dict that using (Statement, SortedArch) as key,
# (GenericComment, UsageComment) as value into a dict that using SortedArch as
-# key and NewStatement as value
+# key and NewStatement as value
#
def TransferDict(OrigDict, Type=None):
NewDict = {}
@@ -171,7 +171,7 @@ def TransferDict(OrigDict, Type=None):
for Statement, SortedArch in OrigDict:
if len(Statement) > LeftOffset:
LeftOffset = len(Statement)
-
+
for Statement, SortedArch in OrigDict:
Comment = OrigDict[Statement, SortedArch]
#
diff --git a/BaseTools/Source/Python/UPT/GenMetaFile/GenXmlFile.py b/BaseTools/Source/Python/UPT/GenMetaFile/GenXmlFile.py
index b1f427723c..3376091367 100644
--- a/BaseTools/Source/Python/UPT/GenMetaFile/GenXmlFile.py
+++ b/BaseTools/Source/Python/UPT/GenMetaFile/GenXmlFile.py
@@ -2,11 +2,11 @@
#
# This file contained the logical of generate XML files.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -15,4 +15,4 @@
'''
GenXmlFile
-'''
\ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/UPT/GenMetaFile/__init__.py b/BaseTools/Source/Python/UPT/GenMetaFile/__init__.py
index 269ba0bc48..bf0b3bc07c 100644
--- a/BaseTools/Source/Python/UPT/GenMetaFile/__init__.py
+++ b/BaseTools/Source/Python/UPT/GenMetaFile/__init__.py
@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
GenMetaFile
-'''
\ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/UPT/InstallPkg.py b/BaseTools/Source/Python/UPT/InstallPkg.py
index dc22ff7e34..3573886d5a 100644
--- a/BaseTools/Source/Python/UPT/InstallPkg.py
+++ b/BaseTools/Source/Python/UPT/InstallPkg.py
@@ -1,11 +1,11 @@
## @file
# Install distribution package.
#
-# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -102,7 +102,7 @@ def InstallNewModule(WorkspaceDir, Path, PathList = None):
Logger.Info(ST.MSG_RELATIVE_PATH_ONLY%FullPath)
else:
return Path
-
+
Input = stdin.readline()
Input = Input.replace('\r', '').replace('\n', '')
if Input == '':
@@ -110,7 +110,7 @@ def InstallNewModule(WorkspaceDir, Path, PathList = None):
Input = Input.replace('\r', '').replace('\n', '')
return InstallNewModule(WorkspaceDir, Input, PathList)
-
+
## InstallNewFile
#
# @param WorkspaceDir: Workspace Direction
@@ -137,15 +137,15 @@ def UnZipDp(WorkspaceDir, DpPkgFileName, Index=1):
ContentZipFile = None
Logger.Quiet(ST.MSG_UZIP_PARSE_XML)
DistFile = PackageFile(DpPkgFileName)
-
+
DpDescFileName, ContentFileName = GetDPFile(DistFile.GetZipFile())
-
+
TempDir = os.path.normpath(os.path.join(WorkspaceDir, "Conf/.tmp%s" % str(Index)))
GlobalData.gUNPACK_DIR.append(TempDir)
DistPkgFile = DistFile.UnpackFile(DpDescFileName, os.path.normpath(os.path.join(TempDir, DpDescFileName)))
if not DistPkgFile:
Logger.Error("InstallPkg", FILE_NOT_FOUND, ST.ERR_FILE_BROKEN %DpDescFileName)
-
+
#
# Generate distpkg
#
@@ -166,10 +166,10 @@ def UnZipDp(WorkspaceDir, DpPkgFileName, Index=1):
#
# Get file size
- #
+ #
FileSize = os.path.getsize(ContentFile)
-
- if FileSize != 0:
+
+ if FileSize != 0:
ContentZipFile = PackageFile(ContentFile)
#
@@ -201,12 +201,12 @@ def GetPackageList(DistPkg, Dep, WorkspaceDir, Options, ContentZipFile, ModuleLi
NewPackagePath = InstallNewPackage(WorkspaceDir, GuidedPkgPath, Options.CustomPath)
else:
NewPackagePath = InstallNewPackage(WorkspaceDir, PackagePath, Options.CustomPath)
- InstallPackageContent(PackagePath, NewPackagePath, Package, ContentZipFile, Dep, WorkspaceDir, ModuleList,
+ InstallPackageContent(PackagePath, NewPackagePath, Package, ContentZipFile, Dep, WorkspaceDir, ModuleList,
DistPkg.Header.ReadOnly)
PackageList.append(Package)
-
+
NewDict[Guid, Version, Package.GetPackagePath()] = Package
-
+
#
# Now generate meta-data files, first generate all dec for package
# dec should be generated before inf, and inf should be generated after
@@ -219,7 +219,7 @@ def GetPackageList(DistPkg, Dep, WorkspaceDir, Options, ContentZipFile, ModuleLi
Md5Sum = Md5Sigature.hexdigest()
if (FilePath, Md5Sum) not in Package.FileList:
Package.FileList.append((FilePath, Md5Sum))
-
+
return NewDict
## GetModuleList
@@ -229,18 +229,18 @@ def GetPackageList(DistPkg, Dep, WorkspaceDir, Options, ContentZipFile, ModuleLi
def GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList):
#
# ModulePathList will keep track of the standalone module path that
- # we just installed. If a new module's path in that list
- # (only multiple INF in one directory will be so), we will
- # install them directly. If not, we will try to create a new directory
+ # we just installed. If a new module's path in that list
+ # (only multiple INF in one directory will be so), we will
+ # install them directly. If not, we will try to create a new directory
# for it.
#
ModulePathList = []
-
+
#
# Check module exist and install
#
Module = None
- NewDict = Sdict()
+ NewDict = Sdict()
for Guid, Version, Name, Path in DistPkg.ModuleSurfaceArea:
ModulePath = Path
Module = DistPkg.ModuleSurfaceArea[Guid, Version, Name, Path]
@@ -259,14 +259,14 @@ def GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList):
ModulePathList.append(NewModuleFullPath)
else:
NewModulePath = ModulePath
-
- InstallModuleContent(ModulePath, NewModulePath, '', Module, ContentZipFile, WorkspaceDir, ModuleList, None,
+
+ InstallModuleContent(ModulePath, NewModulePath, '', Module, ContentZipFile, WorkspaceDir, ModuleList, None,
DistPkg.Header.ReadOnly)
#
# Update module
#
Module.SetModulePath(Module.GetModulePath().replace(Path, NewModulePath, 1))
-
+
NewDict[Guid, Version, Name, Module.GetModulePath()] = Module
#
@@ -289,7 +289,7 @@ def GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList):
for (FilePath, Md5Sum) in Module.FileList:
if str(FilePath).endswith('.uni') and Package and (FilePath, Md5Sum) not in Package.FileList:
Package.FileList.append((FilePath, Md5Sum))
-
+
return NewDict
##
@@ -303,7 +303,7 @@ def GetDepProtocolPpiGuidPcdNames(DePackageObjList):
DependentPpiCNames = []
DependentGuidCNames = []
DependentPcdNames = []
-
+
for PackageObj in DePackageObjList:
#
# Get protocol CName list from all dependent DEC file
@@ -312,29 +312,29 @@ def GetDepProtocolPpiGuidPcdNames(DePackageObjList):
for Protocol in PackageObj.GetProtocolList():
if Protocol.GetCName() not in ProtocolCNames:
ProtocolCNames.append(Protocol.GetCName())
-
+
DependentProtocolCNames.append(ProtocolCNames)
-
+
#
# Get Ppi CName list from all dependent DEC file
- #
+ #
PpiCNames = []
for Ppi in PackageObj.GetPpiList():
if Ppi.GetCName() not in PpiCNames:
PpiCNames.append(Ppi.GetCName())
DependentPpiCNames.append(PpiCNames)
-
+
#
# Get Guid CName list from all dependent DEC file
- #
+ #
GuidCNames = []
for Guid in PackageObj.GetGuidList():
if Guid.GetCName() not in GuidCNames:
GuidCNames.append(Guid.GetCName())
-
+
DependentGuidCNames.append(GuidCNames)
-
+
#
# Get PcdName list from all dependent DEC file
#
@@ -343,10 +343,10 @@ def GetDepProtocolPpiGuidPcdNames(DePackageObjList):
PcdName = '.'.join([Pcd.GetTokenSpaceGuidCName(), Pcd.GetCName()])
if PcdName not in PcdNames:
PcdNames.append(PcdName)
-
+
DependentPcdNames.append(PcdNames)
-
-
+
+
return DependentProtocolCNames, DependentPpiCNames, DependentGuidCNames, DependentPcdNames
##
@@ -358,8 +358,8 @@ def CheckProtoclCNameRedefined(Module, DependentProtocolCNames):
for PackageProtocolCNames in DependentProtocolCNames:
if ProtocolInModule.GetCName() in PackageProtocolCNames:
if IsCNameDefined:
- Logger.Error("\nUPT", FORMAT_INVALID,
- File = Module.GetFullPath(),
+ Logger.Error("\nUPT", FORMAT_INVALID,
+ File = Module.GetFullPath(),
ExtraData = \
ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % ProtocolInModule.GetCName())
else:
@@ -374,11 +374,11 @@ def CheckPpiCNameRedefined(Module, DependentPpiCNames):
for PackagePpiCNames in DependentPpiCNames:
if PpiInModule.GetCName() in PackagePpiCNames:
if IsCNameDefined:
- Logger.Error("\nUPT", FORMAT_INVALID,
- File = Module.GetFullPath(),
+ Logger.Error("\nUPT", FORMAT_INVALID,
+ File = Module.GetFullPath(),
ExtraData = ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % PpiInModule.GetCName())
else:
- IsCNameDefined = True
+ IsCNameDefined = True
##
# Check if Guid CName is redefined
@@ -389,8 +389,8 @@ def CheckGuidCNameRedefined(Module, DependentGuidCNames):
for PackageGuidCNames in DependentGuidCNames:
if GuidInModule.GetCName() in PackageGuidCNames:
if IsCNameDefined:
- Logger.Error("\nUPT", FORMAT_INVALID,
- File = Module.GetFullPath(),
+ Logger.Error("\nUPT", FORMAT_INVALID,
+ File = Module.GetFullPath(),
ExtraData = \
ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % GuidInModule.GetCName())
else:
@@ -414,8 +414,8 @@ def CheckPcdNameRedefined(Module, DependentPcdNames):
for PcdNames in DependentPcdNames:
if PcdName in PcdNames:
if IsPcdNameDefined:
- Logger.Error("\nUPT", FORMAT_INVALID,
- File = Module.GetFullPath(),
+ Logger.Error("\nUPT", FORMAT_INVALID,
+ File = Module.GetFullPath(),
ExtraData = ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % PcdName)
else:
IsPcdNameDefined = True
@@ -427,7 +427,7 @@ def CheckCNameInModuleRedefined(Module, DistPkg):
DePackageObjList = []
#
# Get all dependent package objects
- #
+ #
for Obj in Module.GetPackageDependencyList():
Guid = Obj.GetGuid()
Version = Obj.GetVersion()
@@ -435,7 +435,7 @@ def CheckCNameInModuleRedefined(Module, DistPkg):
if Key[0] == Guid and Key[1] == Version:
if DistPkg.PackageSurfaceArea[Key] not in DePackageObjList:
DePackageObjList.append(DistPkg.PackageSurfaceArea[Key])
-
+
DependentProtocolCNames, DependentPpiCNames, DependentGuidCNames, DependentPcdNames = \
GetDepProtocolPpiGuidPcdNames(DePackageObjList)
@@ -457,7 +457,7 @@ def GenToolMisc(DistPkg, WorkspaceDir, ContentZipFile):
ToolFileNum = 0
FileNum = 0
RootDir = WorkspaceDir
-
+
#
# FileList stores both tools files and misc files
# Misc file list must be appended to FileList *AFTER* Tools file list
@@ -506,7 +506,7 @@ def Main(Options = None):
WorkspaceDir = GlobalData.gWORKSPACE
if not Options.PackageFile:
Logger.Error("InstallPkg", OPTION_MISSING, ExtraData=ST.ERR_SPECIFY_PACKAGE)
-
+
# Get all Dist Info
DistInfoList = []
DistPkgList = []
@@ -536,17 +536,17 @@ def Main(Options = None):
InstallDp(ToBeInstalledDist[0], ToBeInstalledDist[2], ToBeInstalledDist[1],
Options, Dep, WorkspaceDir, DataBase)
ReturnCode = 0
-
+
except FatalError as XExcept:
ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
-
+
except KeyboardInterrupt:
ReturnCode = ABORT_ERROR
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
-
+
except:
ReturnCode = CODE_ERROR
Logger.Error(
@@ -574,8 +574,8 @@ def Main(Options = None):
return ReturnCode
# BackupDist method
-#
-# This method will backup the Distribution file into the $(WORKSPACE)/conf/upt, and rename it
+#
+# This method will backup the Distribution file into the $(WORKSPACE)/conf/upt, and rename it
# if there is already a same-named distribution existed.
#
# @param DpPkgFileName: The distribution path
@@ -645,19 +645,19 @@ def CheckInstallDpx(Dep, DistPkg, DistPkgFileName):
#
def InstallModuleContent(FromPath, NewPath, ModulePath, Module, ContentZipFile,
WorkspaceDir, ModuleList, Package = None, ReadOnly = False):
-
+
if NewPath.startswith("\\") or NewPath.startswith("/"):
NewPath = NewPath[1:]
-
+
if not IsValidInstallPath(NewPath):
- Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%NewPath)
-
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%NewPath)
+
NewModuleFullPath = os.path.normpath(os.path.join(WorkspaceDir, NewPath,
ConvertPath(ModulePath)))
Module.SetFullPath(os.path.normpath(os.path.join(NewModuleFullPath,
ConvertPath(Module.GetName()) + '.inf')))
Module.FileList = []
-
+
for MiscFile in Module.GetMiscFileList():
if not MiscFile:
continue
@@ -665,12 +665,12 @@ def InstallModuleContent(FromPath, NewPath, ModulePath, Module, ContentZipFile,
File = Item.GetURI()
if File.startswith("\\") or File.startswith("/"):
File = File[1:]
-
+
if not IsValidInstallPath(File):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
FromFile = os.path.join(FromPath, ModulePath, File)
- Executable = Item.GetExecutable()
+ Executable = Item.GetExecutable()
ToFile = os.path.normpath(os.path.join(NewModuleFullPath, ConvertPath(File)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable)
if Package and ((ToFile, Md5Sum) not in Package.FileList):
@@ -683,10 +683,10 @@ def InstallModuleContent(FromPath, NewPath, ModulePath, Module, ContentZipFile,
File = Item.GetSourceFile()
if File.startswith("\\") or File.startswith("/"):
File = File[1:]
-
+
if not IsValidInstallPath(File):
- Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
-
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
+
FromFile = os.path.join(FromPath, ModulePath, File)
ToFile = os.path.normpath(os.path.join(NewModuleFullPath, ConvertPath(File)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
@@ -698,24 +698,24 @@ def InstallModuleContent(FromPath, NewPath, ModulePath, Module, ContentZipFile,
Module.FileList.append((ToFile, Md5Sum))
for Item in Module.GetBinaryFileList():
FileNameList = Item.GetFileNameList()
- for FileName in FileNameList:
- File = FileName.GetFilename()
+ for FileName in FileNameList:
+ File = FileName.GetFilename()
if File.startswith("\\") or File.startswith("/"):
File = File[1:]
-
+
if not IsValidInstallPath(File):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
FromFile = os.path.join(FromPath, ModulePath, File)
ToFile = os.path.normpath(os.path.join(NewModuleFullPath, ConvertPath(File)))
- Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
+ Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
if Package and ((ToFile, Md5Sum) not in Package.FileList):
Package.FileList.append((ToFile, Md5Sum))
elif Package:
continue
elif (ToFile, Md5Sum) not in Module.FileList:
Module.FileList.append((ToFile, Md5Sum))
-
+
InstallModuleContentZipFile(ContentZipFile, FromPath, ModulePath, WorkspaceDir, NewPath, Module, Package, ReadOnly,
ModuleList)
@@ -726,7 +726,7 @@ def InstallModuleContent(FromPath, NewPath, ModulePath, Module, ContentZipFile,
def InstallModuleContentZipFile(ContentZipFile, FromPath, ModulePath, WorkspaceDir, NewPath, Module, Package, ReadOnly,
ModuleList):
#
- # Extract other files under current module path in content Zip file but not listed in the description
+ # Extract other files under current module path in content Zip file but not listed in the description
#
if ContentZipFile:
for FileName in ContentZipFile.GetZipFile().namelist():
@@ -735,12 +735,12 @@ def InstallModuleContentZipFile(ContentZipFile, FromPath, ModulePath, WorkspaceD
if FileUnderPath(FileName, CheckPath):
if FileName.startswith("\\") or FileName.startswith("/"):
FileName = FileName[1:]
-
+
if not IsValidInstallPath(FileName):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
-
+
FromFile = FileName
- ToFile = os.path.normpath(os.path.join(WorkspaceDir,
+ ToFile = os.path.normpath(os.path.join(WorkspaceDir,
ConvertPath(FileName.replace(FromPath, NewPath, 1))))
CheckList = copy.copy(Module.FileList)
if Package:
@@ -755,16 +755,16 @@ def InstallModuleContentZipFile(ContentZipFile, FromPath, ModulePath, WorkspaceD
elif Package:
continue
elif (ToFile, Md5Sum) not in Module.FileList:
- Module.FileList.append((ToFile, Md5Sum))
-
+ Module.FileList.append((ToFile, Md5Sum))
+
ModuleList.append((Module, Package))
## FileUnderPath
-# Check whether FileName started with directory specified by CheckPath
+# Check whether FileName started with directory specified by CheckPath
#
# @param FileName: the FileName need to be checked
# @param CheckPath: the path need to be checked against
-# @return: True or False
+# @return: True or False
#
def FileUnderPath(FileName, CheckPath):
FileName = FileName.replace('\\', '/')
@@ -777,13 +777,13 @@ def FileUnderPath(FileName, CheckPath):
RemainingPath = RemainingPath[1:]
if FileName == os.path.normpath(os.path.join(CheckPath, RemainingPath)):
return True
-
+
return False
## InstallFile
# Extract File from Zipfile, set file attribute, and return the Md5Sum
#
-# @return: True or False
+# @return: True or False
#
def InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable=False):
if os.path.exists(os.path.normpath(ToFile)):
@@ -802,7 +802,7 @@ def InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable=False):
stat.S_IWOTH | stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)
else:
chmod(ToFile, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
-
+
Md5Sigature = md5.new(__FileHookOpen__(str(ToFile), 'rb').read())
Md5Sum = Md5Sigature.hexdigest()
@@ -823,44 +823,44 @@ def InstallPackageContent(FromPath, ToPath, Package, ContentZipFile, Dep,
if Dep:
pass
Package.FileList = []
-
+
if ToPath.startswith("\\") or ToPath.startswith("/"):
ToPath = ToPath[1:]
-
+
if not IsValidInstallPath(ToPath):
- Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%ToPath)
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%ToPath)
if FromPath.startswith("\\") or FromPath.startswith("/"):
FromPath = FromPath[1:]
-
+
if not IsValidInstallPath(FromPath):
- Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FromPath)
-
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FromPath)
+
PackageFullPath = os.path.normpath(os.path.join(WorkspaceDir, ToPath))
for MiscFile in Package.GetMiscFileList():
for Item in MiscFile.GetFileList():
FileName = Item.GetURI()
if FileName.startswith("\\") or FileName.startswith("/"):
FileName = FileName[1:]
-
+
if not IsValidInstallPath(FileName):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
-
+
FromFile = os.path.join(FromPath, FileName)
Executable = Item.GetExecutable()
ToFile = (os.path.join(PackageFullPath, ConvertPath(FileName)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable)
if (ToFile, Md5Sum) not in Package.FileList:
Package.FileList.append((ToFile, Md5Sum))
- PackageIncludeArchList = []
+ PackageIncludeArchList = []
for Item in Package.GetPackageIncludeFileList():
FileName = Item.GetFilePath()
if FileName.startswith("\\") or FileName.startswith("/"):
FileName = FileName[1:]
-
+
if not IsValidInstallPath(FileName):
- Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
-
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
+
FromFile = os.path.join(FromPath, FileName)
ToFile = os.path.normpath(os.path.join(PackageFullPath, ConvertPath(FileName)))
RetFile = ContentZipFile.UnpackFile(FromFile, ToFile)
@@ -875,21 +875,21 @@ def InstallPackageContent(FromPath, ToPath, Package, ContentZipFile, Dep,
if ReadOnly:
chmod(ToFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
else:
- chmod(ToFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH|stat.S_IWUSR|stat.S_IWGRP|stat.S_IWOTH)
+ chmod(ToFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH|stat.S_IWUSR|stat.S_IWGRP|stat.S_IWOTH)
Md5Sigature = md5.new(__FileHookOpen__(str(ToFile), 'rb').read())
Md5Sum = Md5Sigature.hexdigest()
if (ToFile, Md5Sum) not in Package.FileList:
Package.FileList.append((ToFile, Md5Sum))
Package.SetIncludeArchList(PackageIncludeArchList)
-
+
for Item in Package.GetStandardIncludeFileList():
FileName = Item.GetFilePath()
if FileName.startswith("\\") or FileName.startswith("/"):
FileName = FileName[1:]
-
+
if not IsValidInstallPath(FileName):
- Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
-
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
+
FromFile = os.path.join(FromPath, FileName)
ToFile = os.path.normpath(os.path.join(PackageFullPath, ConvertPath(FileName)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
@@ -932,7 +932,7 @@ def GetDPFile(ZipFile):
continue
else:
continue
-
+
Logger.Error("PackagingTool", FILE_TYPE_MISMATCH,
ExtraData=ST.ERR_DIST_FILE_TOOMANY)
if not DescFile or not ContentFile:
@@ -951,13 +951,13 @@ def InstallDp(DistPkg, DpPkgFileName, ContentZipFile, Options, Dep, WorkspaceDir
#
PackageList = []
ModuleList = []
- DistPkg.PackageSurfaceArea = GetPackageList(DistPkg, Dep, WorkspaceDir, Options,
+ DistPkg.PackageSurfaceArea = GetPackageList(DistPkg, Dep, WorkspaceDir, Options,
ContentZipFile, ModuleList, PackageList)
DistPkg.ModuleSurfaceArea = GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList)
-
+
GenToolMisc(DistPkg, WorkspaceDir, ContentZipFile)
-
+
#
# copy "Distribution File" to directory $(WORKSPACE)/conf/upt
#
@@ -968,6 +968,6 @@ def InstallDp(DistPkg, DpPkgFileName, ContentZipFile, Options, Dep, WorkspaceDir
# update database
#
Logger.Quiet(ST.MSG_UPDATE_PACKAGE_DATABASE)
- DataBase.AddDPObject(DistPkg, NewDpPkgFileName, DistFileName,
+ DataBase.AddDPObject(DistPkg, NewDpPkgFileName, DistFileName,
DistPkg.Header.RePackage)
diff --git a/BaseTools/Source/Python/UPT/InventoryWs.py b/BaseTools/Source/Python/UPT/InventoryWs.py
index cd92753a8d..c74e208631 100644
--- a/BaseTools/Source/Python/UPT/InventoryWs.py
+++ b/BaseTools/Source/Python/UPT/InventoryWs.py
@@ -1,11 +1,11 @@
## @file
# Inventory workspace's distribution package information.
#
-# Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -37,7 +37,7 @@ from Library import GlobalData
#
def InventoryDistInstalled(DataBase):
DistInstalled = DataBase.InventoryDistInstalled()
-
+
#
# find the max length for each item
#
@@ -47,9 +47,9 @@ def InventoryDistInstalled(DataBase):
DpOriginalNameStr = "DpOriginalName"
MaxGuidlen = len(DpGuidStr)
MaxVerlen = len(DpVerStr)
- MaxDpAliasFileNameLen = len(DpNameStr)
+ MaxDpAliasFileNameLen = len(DpNameStr)
MaxDpOrigFileNamelen = len(DpOriginalNameStr)
-
+
for (DpGuid, DpVersion, DpOriginalName, DpAliasFileName) in DistInstalled:
MaxGuidlen = max(MaxGuidlen, len(DpGuid))
MaxVerlen = max(MaxVerlen, len(DpVersion))
@@ -57,22 +57,22 @@ def InventoryDistInstalled(DataBase):
MaxDpOrigFileNamelen = max(MaxDpOrigFileNamelen, len(DpOriginalName))
OutMsgFmt = "%-*s\t%-*s\t%-*s\t%-s"
- OutMsg = OutMsgFmt % (MaxDpAliasFileNameLen,
- DpNameStr,
- MaxGuidlen,
- DpGuidStr,
- MaxVerlen,
- DpVerStr,
+ OutMsg = OutMsgFmt % (MaxDpAliasFileNameLen,
+ DpNameStr,
+ MaxGuidlen,
+ DpGuidStr,
+ MaxVerlen,
+ DpVerStr,
DpOriginalNameStr)
Logger.Info(OutMsg)
-
+
for (DpGuid, DpVersion, DpFileName, DpAliasFileName) in DistInstalled:
- OutMsg = OutMsgFmt % (MaxDpAliasFileNameLen,
- DpAliasFileName,
- MaxGuidlen,
- DpGuid,
- MaxVerlen,
- DpVersion,
+ OutMsg = OutMsgFmt % (MaxDpAliasFileNameLen,
+ DpAliasFileName,
+ MaxGuidlen,
+ DpGuid,
+ MaxVerlen,
+ DpVersion,
DpFileName)
Logger.Info(OutMsg)
@@ -90,13 +90,13 @@ def Main(Options = None):
try:
DataBase = GlobalData.gDB
- InventoryDistInstalled(DataBase)
- ReturnCode = 0
+ InventoryDistInstalled(DataBase)
+ ReturnCode = 0
except FatalError as XExcept:
ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
- except KeyboardInterrupt:
+ except KeyboardInterrupt:
ReturnCode = ABORT_ERROR
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
@@ -113,5 +113,5 @@ def Main(Options = None):
if ReturnCode == 0:
Logger.Quiet(ST.MSG_FINISH)
-
-    return ReturnCode
\ No newline at end of file
+
+ return ReturnCode
diff --git a/BaseTools/Source/Python/UPT/Library/CommentGenerating.py b/BaseTools/Source/Python/UPT/Library/CommentGenerating.py
index ab1725ff0e..4726629695 100644
--- a/BaseTools/Source/Python/UPT/Library/CommentGenerating.py
+++ b/BaseTools/Source/Python/UPT/Library/CommentGenerating.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -39,21 +39,21 @@ from Library.Misc import GetLocalValue
## GenTailCommentLines
#
# @param TailCommentLines: the tail comment lines that need to be generated
-# @param LeadingSpaceNum: the number of leading space needed for non-first
+# @param LeadingSpaceNum: the number of leading space needed for non-first
# line tail comment
-#
+#
def GenTailCommentLines (TailCommentLines, LeadingSpaceNum = 0):
TailCommentLines = TailCommentLines.rstrip(END_OF_LINE)
CommentStr = TAB_SPACE_SPLIT*2 + TAB_SPECIAL_COMMENT + TAB_SPACE_SPLIT + \
(END_OF_LINE + LeadingSpaceNum * TAB_SPACE_SPLIT + TAB_SPACE_SPLIT*2 + TAB_SPECIAL_COMMENT + \
TAB_SPACE_SPLIT).join(GetSplitValueList(TailCommentLines, END_OF_LINE))
-
+
return CommentStr
## GenGenericComment
#
# @param CommentLines: Generic comment Text, maybe Multiple Lines
-#
+#
def GenGenericComment (CommentLines):
if not CommentLines:
return ''
@@ -68,8 +68,8 @@ def GenGenericComment (CommentLines):
# and for line with only <EOL>, '#\n' will be generated instead of '# \n'
#
# @param CommentLines: Generic comment Text, maybe Multiple Lines
-# @return CommentStr: Generated comment line
-#
+# @return CommentStr: Generated comment line
+#
def GenGenericCommentF (CommentLines, NumOfPound=1, IsPrompt=False, IsInfLibraryClass=False):
if not CommentLines:
return ''
@@ -104,7 +104,7 @@ def GenGenericCommentF (CommentLines, NumOfPound=1, IsPrompt=False, IsInfLibrary
CommentStr += TAB_COMMENT_SPLIT * NumOfPound + TAB_SPACE_SPLIT * 16 + Line + END_OF_LINE
else:
CommentStr += TAB_COMMENT_SPLIT * NumOfPound + TAB_SPACE_SPLIT + Line + END_OF_LINE
-
+
return CommentStr
@@ -112,7 +112,7 @@ def GenGenericCommentF (CommentLines, NumOfPound=1, IsPrompt=False, IsInfLibrary
#
# Generate Header comment sections
#
-# @param Abstract One line of abstract
+# @param Abstract One line of abstract
# @param Description multiple lines of Description
# @param Copyright possible multiple copyright lines
# @param License possible multiple license lines
@@ -148,9 +148,9 @@ def GenHeaderCommentSection(Abstract, Description, Copyright, License, IsBinaryH
Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
(Description, '\n'))
Content += '\r\n' + CommChar + '\r\n'
-
+
#
- # There is no '#\n' line to separate multiple copyright lines in code base
+ # There is no '#\n' line to separate multiple copyright lines in code base
#
if Copyright:
Copyright = Copyright.rstrip('\r\n')
@@ -163,12 +163,12 @@ def GenHeaderCommentSection(Abstract, Description, Copyright, License, IsBinaryH
Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
(License, '\n'))
Content += '\r\n' + CommChar + '\r\n'
-
+
if CommChar == TAB_COMMENT_EDK1_SPLIT:
Content += CommChar + TAB_SPACE_SPLIT + TAB_STAR + TAB_COMMENT_EDK1_END + '\r\n'
else:
Content += CommChar * 2 + '\r\n'
-
+
return Content
@@ -177,11 +177,11 @@ def GenHeaderCommentSection(Abstract, Description, Copyright, License, IsBinaryH
#
# @param Usage: Usage type
# @param TailCommentText: Comment text for tail comment
-#
+#
def GenInfPcdTailComment (Usage, TailCommentText):
if (Usage == ITEM_UNDEFINED) and (not TailCommentText):
return ''
-
+
CommentLine = TAB_SPACE_SPLIT.join([Usage, TailCommentText])
return GenTailCommentLines(CommentLine)
@@ -190,16 +190,16 @@ def GenInfPcdTailComment (Usage, TailCommentText):
#
# @param Usage: Usage type
# @param TailCommentText: Comment text for tail comment
-#
+#
def GenInfProtocolPPITailComment (Usage, Notify, TailCommentText):
if (not Notify) and (Usage == ITEM_UNDEFINED) and (not TailCommentText):
return ''
-
+
if Notify:
CommentLine = USAGE_ITEM_NOTIFY + " ## "
else:
CommentLine = ''
-
+
CommentLine += TAB_SPACE_SPLIT.join([Usage, TailCommentText])
return GenTailCommentLines(CommentLine)
@@ -208,39 +208,39 @@ def GenInfProtocolPPITailComment (Usage, Notify, TailCommentText):
#
# @param Usage: Usage type
# @param TailCommentText: Comment text for tail comment
-#
+#
def GenInfGuidTailComment (Usage, GuidTypeList, VariableName, TailCommentText):
GuidType = GuidTypeList[0]
if (Usage == ITEM_UNDEFINED) and (GuidType == ITEM_UNDEFINED) and \
(not TailCommentText):
return ''
-
- FirstLine = Usage + " ## " + GuidType
+
+ FirstLine = Usage + " ## " + GuidType
if GuidType == TAB_INF_GUIDTYPE_VAR:
FirstLine += ":" + VariableName
-
+
CommentLine = TAB_SPACE_SPLIT.join([FirstLine, TailCommentText])
return GenTailCommentLines(CommentLine)
## GenDecGuidTailComment
#
# @param SupModuleList: Supported module type list
-#
-def GenDecTailComment (SupModuleList):
+#
+def GenDecTailComment (SupModuleList):
CommentLine = TAB_SPACE_SPLIT.join(SupModuleList)
return GenTailCommentLines(CommentLine)
## _GetHelpStr
-# get HelpString from a list of HelpTextObject, the priority refer to
+# get HelpString from a list of HelpTextObject, the priority refer to
# related HLD
#
# @param HelpTextObjList: List of HelpTextObject
-#
+#
# @return HelpStr: the help text string found, '' means no help text found
#
def _GetHelpStr(HelpTextObjList):
- ValueList = []
+ ValueList = []
for HelpObj in HelpTextObjList:
ValueList.append((HelpObj.GetLang(), HelpObj.GetString()))
return GetLocalValue(ValueList, True)
diff --git a/BaseTools/Source/Python/UPT/Library/CommentParsing.py b/BaseTools/Source/Python/UPT/Library/CommentParsing.py
index 8ee788bd77..285812c9c2 100644
--- a/BaseTools/Source/Python/UPT/Library/CommentParsing.py
+++ b/BaseTools/Source/Python/UPT/Library/CommentParsing.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -69,7 +69,7 @@ def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = Fal
else:
STR_HEADER_COMMENT_START = "@file"
HeaderCommentStage = HEADER_COMMENT_NOT_STARTED
-
+
#
# first find the last copyright line
#
@@ -79,24 +79,24 @@ def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = Fal
if _IsCopyrightLine(Line):
Last = Index
break
-
+
for Item in CommentList:
Line = Item[0]
LineNo = Item[1]
-
+
if not Line.startswith(TAB_COMMENT_SPLIT) and Line:
Logger.Error("\nUPT", FORMAT_INVALID, ST.ERR_INVALID_COMMENT_FORMAT, FileName, Item[1])
Comment = CleanString2(Line)[1]
Comment = Comment.strip()
#
- # if there are blank lines between License or Description, keep them as they would be
+ # if there are blank lines between License or Description, keep them as they would be
# indication of different block; or in the position that Abstract should be, also keep it
# as it indicates that no abstract
#
if not Comment and HeaderCommentStage not in [HEADER_COMMENT_LICENSE, \
HEADER_COMMENT_DESCRIPTION, HEADER_COMMENT_ABSTRACT]:
continue
-
+
if HeaderCommentStage == HEADER_COMMENT_NOT_STARTED:
if Comment.startswith(STR_HEADER_COMMENT_START):
HeaderCommentStage = HEADER_COMMENT_ABSTRACT
@@ -114,20 +114,20 @@ def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = Fal
ValidateCopyright(Result, ST.WRN_INVALID_COPYRIGHT, FileName, LineNo, ErrMsg)
Copyright += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
- else:
+ else:
Abstract += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
elif HeaderCommentStage == HEADER_COMMENT_DESCRIPTION:
#
# in case there is no description
- #
+ #
if _IsCopyrightLine(Comment):
Result, ErrMsg = _ValidateCopyright(Comment)
ValidateCopyright(Result, ST.WRN_INVALID_COPYRIGHT, FileName, LineNo, ErrMsg)
Copyright += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
else:
- Description += Comment + EndOfLine
+ Description += Comment + EndOfLine
elif HeaderCommentStage == HEADER_COMMENT_COPYRIGHT:
if _IsCopyrightLine(Comment):
Result, ErrMsg = _ValidateCopyright(Comment)
@@ -136,23 +136,23 @@ def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = Fal
else:
#
# Contents after copyright line are license, those non-copyright lines in between
- # copyright line will be discarded
+ # copyright line will be discarded
#
if LineNo > Last:
if License:
License += EndOfLine
License += Comment + EndOfLine
- HeaderCommentStage = HEADER_COMMENT_LICENSE
+ HeaderCommentStage = HEADER_COMMENT_LICENSE
else:
if not Comment and not License:
continue
License += Comment + EndOfLine
-
+
return Abstract.strip(), Description.strip(), Copyright.strip(), License.strip()
## _IsCopyrightLine
-# check whether current line is copyright line, the criteria is whether there is case insensitive keyword "Copyright"
-# followed by zero or more white space characters followed by a "(" character
+# check whether current line is copyright line, the criteria is whether there is case insensitive keyword "Copyright"
+# followed by zero or more white space characters followed by a "(" character
#
# @param LineContent: the line need to be checked
# @return: True if current line is copyright line, False else
@@ -160,7 +160,7 @@ def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = Fal
def _IsCopyrightLine (LineContent):
LineContent = LineContent.upper()
Result = False
-
+
ReIsCopyrightRe = re.compile(r"""(^|\s)COPYRIGHT *\(""", re.DOTALL)
if ReIsCopyrightRe.search(LineContent):
Result = True
@@ -169,23 +169,23 @@ def _IsCopyrightLine (LineContent):
## ParseGenericComment
#
-# @param GenericComment: Generic comment list, element of
+# @param GenericComment: Generic comment list, element of
# (CommentLine, LineNum)
# @param ContainerFile: Input value for filename of Dec file
-#
+#
def ParseGenericComment (GenericComment, ContainerFile=None, SkipTag=None):
if ContainerFile:
pass
- HelpTxt = None
- HelpStr = ''
-
+ HelpTxt = None
+ HelpStr = ''
+
for Item in GenericComment:
CommentLine = Item[0]
Comment = CleanString2(CommentLine)[1]
if SkipTag is not None and Comment.startswith(SkipTag):
Comment = Comment.replace(SkipTag, '', 1)
HelpStr += Comment + '\n'
-
+
if HelpStr:
HelpTxt = TextObject()
if HelpStr.endswith('\n') and not HelpStr.endswith('\n\n') and HelpStr != '\n':
@@ -196,22 +196,22 @@ def ParseGenericComment (GenericComment, ContainerFile=None, SkipTag=None):
## ParsePcdErrorCode
#
-# @param Value: original ErrorCode value
+# @param Value: original ErrorCode value
# @param ContainerFile: Input value for filename of Dec file
-# @param LineNum: Line Num
-#
-def ParsePcdErrorCode (Value = None, ContainerFile = None, LineNum = None):
- try:
+# @param LineNum: Line Num
+#
+def ParsePcdErrorCode (Value = None, ContainerFile = None, LineNum = None):
+ try:
if Value.strip().startswith((TAB_HEX_START, TAB_CAPHEX_START)):
Base = 16
else:
Base = 10
ErrorCode = long(Value, Base)
if ErrorCode > PCD_ERR_CODE_MAX_SIZE or ErrorCode < 0:
- Logger.Error('Parser',
+ Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
"The format %s of ErrorCode is not valid, should be UNIT32 type or long type" % Value,
- File = ContainerFile,
+ File = ContainerFile,
Line = LineNum)
#
# To delete the tailing 'L'
@@ -220,27 +220,27 @@ def ParsePcdErrorCode (Value = None, ContainerFile = None, LineNum = None):
except ValueError as XStr:
if XStr:
pass
- Logger.Error('Parser',
+ Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
"The format %s of ErrorCode is not valid, should be UNIT32 type or long type" % Value,
- File = ContainerFile,
+ File = ContainerFile,
Line = LineNum)
-
+
## ParseDecPcdGenericComment
#
-# @param GenericComment: Generic comment list, element of (CommentLine,
+# @param GenericComment: Generic comment list, element of (CommentLine,
# LineNum)
# @param ContainerFile: Input value for filename of Dec file
-#
-def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCName, CName, MacroReplaceDict):
- HelpStr = ''
+#
+def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCName, CName, MacroReplaceDict):
+ HelpStr = ''
PromptStr = ''
PcdErr = None
PcdErrList = []
ValidValueNum = 0
ValidRangeNum = 0
ExpressionNum = 0
-
+
for (CommentLine, LineNum) in GenericComment:
Comment = CleanString2(CommentLine)[1]
#
@@ -252,13 +252,13 @@ def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCNam
if MatchedStr:
Macro = MatchedStr.strip().lstrip('$(').rstrip(')').strip()
if Macro in MacroReplaceDict:
- Comment = Comment.replace(MatchedStr, MacroReplaceDict[Macro])
+ Comment = Comment.replace(MatchedStr, MacroReplaceDict[Macro])
if Comment.startswith(TAB_PCD_VALIDRANGE):
if ValidValueNum > 0 or ExpressionNum > 0:
- Logger.Error('Parser',
+ Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_RANGES,
- File = ContainerFile,
+ File = ContainerFile,
Line = LineNum)
else:
PcdErr = PcdErrorObject()
@@ -280,21 +280,21 @@ def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCNam
else:
Logger.Error("Parser",
FORMAT_NOT_SUPPORTED,
- Cause,
- ContainerFile,
+ Cause,
+ ContainerFile,
LineNum)
elif Comment.startswith(TAB_PCD_VALIDLIST):
if ValidRangeNum > 0 or ExpressionNum > 0:
- Logger.Error('Parser',
+ Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_RANGES,
- File = ContainerFile,
+ File = ContainerFile,
Line = LineNum)
elif ValidValueNum > 0:
- Logger.Error('Parser',
+ Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_VALIDVALUE,
- File = ContainerFile,
+ File = ContainerFile,
Line = LineNum)
else:
PcdErr = PcdErrorObject()
@@ -317,15 +317,15 @@ def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCNam
else:
Logger.Error("Parser",
FORMAT_NOT_SUPPORTED,
- Cause,
- ContainerFile,
+ Cause,
+ ContainerFile,
LineNum)
elif Comment.startswith(TAB_PCD_EXPRESSION):
if ValidRangeNum > 0 or ValidValueNum > 0:
- Logger.Error('Parser',
+ Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_RANGES,
- File = ContainerFile,
+ File = ContainerFile,
Line = LineNum)
else:
PcdErr = PcdErrorObject()
@@ -344,24 +344,24 @@ def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCNam
else:
PcdErr.SetExpression(Expression)
PcdErrList.append(PcdErr)
- else:
+ else:
Logger.Error("Parser",
FORMAT_NOT_SUPPORTED,
- Cause,
- ContainerFile,
- LineNum)
+ Cause,
+ ContainerFile,
+ LineNum)
elif Comment.startswith(TAB_PCD_PROMPT):
if PromptStr:
- Logger.Error('Parser',
+ Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_PROMPT,
- File = ContainerFile,
+ File = ContainerFile,
Line = LineNum)
PromptStr = Comment.replace(TAB_PCD_PROMPT, "", 1).strip()
else:
if Comment:
HelpStr += Comment + '\n'
-
+
#
# remove the last EOL if the comment is of format 'FOO\n'
#
@@ -384,9 +384,9 @@ def ParseDecPcdTailComment (TailCommentList, ContainerFile):
LineNum = TailCommentList[0][1]
Comment = TailComment.lstrip(" #")
-
+
ReFindFirstWordRe = re.compile(r"""^([^ #]*)""", re.DOTALL)
-
+
#
# get first word and compare with SUP_MODULE_LIST
#
@@ -398,7 +398,7 @@ def ParseDecPcdTailComment (TailCommentList, ContainerFile):
# parse line, it must have supported module type specified
#
if Comment.find(TAB_COMMENT_SPLIT) == -1:
- Comment += TAB_COMMENT_SPLIT
+ Comment += TAB_COMMENT_SPLIT
SupMode, HelpStr = GetSplitValueList(Comment, TAB_COMMENT_SPLIT, 1)
SupModuleList = []
for Mod in GetSplitValueList(SupMode, TAB_SPACE_SPLIT):
@@ -407,8 +407,8 @@ def ParseDecPcdTailComment (TailCommentList, ContainerFile):
elif Mod not in SUP_MODULE_LIST:
Logger.Error("UPT",
FORMAT_INVALID,
- ST.WRN_INVALID_MODULE_TYPE%Mod,
- ContainerFile,
+ ST.WRN_INVALID_MODULE_TYPE%Mod,
+ ContainerFile,
LineNum)
else:
SupModuleList.append(Mod)
@@ -417,15 +417,15 @@ def ParseDecPcdTailComment (TailCommentList, ContainerFile):
## _CheckListExpression
#
-# @param Expression: Pcd value list expression
+# @param Expression: Pcd value list expression
#
def _CheckListExpression(Expression):
ListExpr = ''
if TAB_VALUE_SPLIT in Expression:
- ListExpr = Expression[Expression.find(TAB_VALUE_SPLIT)+1:]
+ ListExpr = Expression[Expression.find(TAB_VALUE_SPLIT)+1:]
else:
ListExpr = Expression
-
+
return IsValidListExpr(ListExpr)
## _CheckExpreesion
@@ -443,14 +443,14 @@ def _CheckExpression(Expression):
## _CheckRangeExpression
#
# @param Expression: Pcd range expression
-#
+#
def _CheckRangeExpression(Expression):
RangeExpr = ''
if TAB_VALUE_SPLIT in Expression:
RangeExpr = Expression[Expression.find(TAB_VALUE_SPLIT)+1:]
else:
RangeExpr = Expression
-
+
return IsValidRangeExpr(RangeExpr)
## ValidateCopyright
@@ -459,28 +459,28 @@ def _CheckRangeExpression(Expression):
#
def ValidateCopyright(Result, ErrType, FileName, LineNo, ErrMsg):
if not Result:
- Logger.Warn("\nUPT", ErrType, FileName, LineNo, ErrMsg)
+ Logger.Warn("\nUPT", ErrType, FileName, LineNo, ErrMsg)
## _ValidateCopyright
#
# @param Line: Line that contains copyright information, # stripped
-#
+#
# @retval Result: True if line is conformed to Spec format, False else
# @retval ErrMsg: the detailed error description
-#
+#
def _ValidateCopyright(Line):
if Line:
pass
Result = True
ErrMsg = ''
-
+
return Result, ErrMsg
def GenerateTokenList (Comment):
#
# Tokenize Comment using '#' and ' ' as token seperators
#
- RelplacedComment = None
+ RelplacedComment = None
while Comment != RelplacedComment:
RelplacedComment = Comment
Comment = Comment.replace('##', '#').replace(' ', ' ').replace(' ', '#').strip('# ')
@@ -500,25 +500,25 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
Usage = None
Type = None
String = None
-
+
Comment = Comment[0]
-
- NumTokens = 2
+
+ NumTokens = 2
if ParseVariable:
- #
- # Remove white space around first instance of ':' from Comment if 'Variable'
+ #
+ # Remove white space around first instance of ':' from Comment if 'Variable'
# is in front of ':' and Variable is the 1st or 2nd token in Comment.
#
- List = Comment.split(':', 1)
+ List = Comment.split(':', 1)
if len(List) > 1:
SubList = GenerateTokenList (List[0].strip())
if len(SubList) in [1, 2] and SubList[-1] == 'Variable':
- if List[1].strip().find('L"') == 0:
+ if List[1].strip().find('L"') == 0:
Comment = List[0].strip() + ':' + List[1].strip()
-
- #
+
+ #
# Remove first instance of L"<VariableName> from Comment and put into String
- # if and only if L"<VariableName>" is the 1st token, the 2nd token. Or
+ # if and only if L"<VariableName>" is the 1st token, the 2nd token. Or
# L"<VariableName>" is the third token immediately following 'Variable:'.
#
End = -1
@@ -533,25 +533,25 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
End = String[2:].find('"')
if End >= 0:
SubList = GenerateTokenList (Comment[:Start])
- if len(SubList) < 2:
+ if len(SubList) < 2:
Comment = Comment[:Start] + String[End + 3:]
String = String[:End + 3]
Type = 'Variable'
- NumTokens = 1
-
+ NumTokens = 1
+
#
- # Initialze HelpText to Comment.
+ # Initialze HelpText to Comment.
# Content will be remove from HelpText as matching tokens are found
- #
+ #
HelpText = Comment
-
+
#
# Tokenize Comment using '#' and ' ' as token seperators
#
List = GenerateTokenList (Comment)
-
+
#
- # Search first two tokens for Usage and Type and remove any matching tokens
+ # Search first two tokens for Usage and Type and remove any matching tokens
# from HelpText
#
for Token in List[0:NumTokens]:
@@ -563,39 +563,39 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
if Type is None and Token in TypeTokens:
Type = TypeTokens[Token]
HelpText = HelpText.replace(Token, '')
- if Usage is not None:
+ if Usage is not None:
for Token in List[0:NumTokens]:
if Token in RemoveTokens:
HelpText = HelpText.replace(Token, '')
-
+
#
# If no Usage token is present and set Usage to UNDEFINED
- #
+ #
if Usage is None:
Usage = 'UNDEFINED'
-
+
#
# If no Type token is present and set Type to UNDEFINED
- #
+ #
if Type is None:
Type = 'UNDEFINED'
-
+
#
# If Type is not 'Variable:', then set String to None
- #
+ #
if Type != 'Variable':
- String = None
-
+ String = None
+
#
# Strip ' ' and '#' from the beginning of HelpText
- # If HelpText is an empty string after all parsing is
+ # If HelpText is an empty string after all parsing is
# complete then set HelpText to None
- #
+ #
HelpText = HelpText.lstrip('# ')
if HelpText == '':
HelpText = None
-
+
#
# Return parsing results
- #
- return Usage, Type, String, HelpText
+ #
+ return Usage, Type, String, HelpText
diff --git a/BaseTools/Source/Python/UPT/Library/DataType.py b/BaseTools/Source/Python/UPT/Library/DataType.py
index c151be3bc4..97ca835882 100644
--- a/BaseTools/Source/Python/UPT/Library/DataType.py
+++ b/BaseTools/Source/Python/UPT/Library/DataType.py
@@ -1,11 +1,11 @@
## @file
# This file is used to define class for data type structure
#
-# Copyright (c) 2011 - 2016, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -64,13 +64,13 @@ USAGE_ITEM_UNDEFINED = 'UNDEFINED'
USAGE_CONSUMES_LIST = [USAGE_ITEM_CONSUMES,
'CONSUMED',
'ALWAYS_CONSUMED',
- 'ALWAYS_CONSUMES'
+ 'ALWAYS_CONSUMES'
]
USAGE_PRODUCES_LIST = [USAGE_ITEM_PRODUCES,
'PRODUCED',
'ALWAYS_PRODUCED',
- 'ALWAYS_PRODUCES'
+ 'ALWAYS_PRODUCES'
]
USAGE_SOMETIMES_PRODUCES_LIST = [USAGE_ITEM_SOMETIMES_PRODUCES,
@@ -94,7 +94,7 @@ TAB_STR_TOKENERR = 'ERR'
#
# Dictionary of usage tokens and their synonmys
-#
+#
ALL_USAGE_TOKENS = {
"PRODUCES" : "PRODUCES",
"PRODUCED" : "PRODUCES",
@@ -109,20 +109,20 @@ ALL_USAGE_TOKENS = {
"SOMETIMES_CONSUMES" : "SOMETIMES_CONSUMES",
"SOMETIMES_CONSUMED" : "SOMETIMES_CONSUMES",
"SOMETIME_CONSUMES" : "SOMETIMES_CONSUMES",
- "UNDEFINED" : "UNDEFINED"
+ "UNDEFINED" : "UNDEFINED"
}
PROTOCOL_USAGE_TOKENS = {
"TO_START" : "TO_START",
"BY_START" : "BY_START"
}
-
+
PROTOCOL_USAGE_TOKENS.update (ALL_USAGE_TOKENS)
-
+
#
# Dictionary of GUID type tokens
-#
-GUID_TYPE_TOKENS = {
+#
+GUID_TYPE_TOKENS = {
"Event" : "Event",
"File" : "File",
"FV" : "FV",
@@ -134,31 +134,31 @@ GUID_TYPE_TOKENS = {
"Hob:" : "HOB",
"SystemTable" : "SystemTable",
"TokenSpaceGuid" : "TokenSpaceGuid",
- "UNDEFINED" : "UNDEFINED"
+ "UNDEFINED" : "UNDEFINED"
}
-
+
#
# Dictionary of Protocol Notify tokens and their synonyms
-#
-PROTOCOL_NOTIFY_TOKENS = {
+#
+PROTOCOL_NOTIFY_TOKENS = {
"NOTIFY" : "NOTIFY",
"PROTOCOL_NOTIFY" : "NOTIFY",
- "UNDEFINED" : "UNDEFINED"
+ "UNDEFINED" : "UNDEFINED"
}
#
# Dictionary of PPI Notify tokens and their synonyms
-#
-PPI_NOTIFY_TOKENS = {
+#
+PPI_NOTIFY_TOKENS = {
"NOTIFY" : "NOTIFY",
"PPI_NOTIFY" : "NOTIFY",
- "UNDEFINED" : "UNDEFINED"
+ "UNDEFINED" : "UNDEFINED"
}
EVENT_TOKENS = {
"EVENT_TYPE_PERIODIC_TIMER" : "EVENT_TYPE_PERIODIC_TIMER",
"EVENT_TYPE_RELATIVE_TIMER" : "EVENT_TYPE_RELATIVE_TIMER",
- "UNDEFINED" : "UNDEFINED"
+ "UNDEFINED" : "UNDEFINED"
}
BOOTMODE_TOKENS = {
@@ -182,16 +182,16 @@ BOOTMODE_TOKENS = {
"RECOVERY_S4_RESUME" : "RECOVERY_S4_RESUME",
"RECOVERY_S5_RESUME" : "RECOVERY_S5_RESUME",
"RECOVERY_FLASH_UPDATE" : "RECOVERY_FLASH_UPDATE",
- "UNDEFINED" : "UNDEFINED"
+ "UNDEFINED" : "UNDEFINED"
}
-HOB_TOKENS = {
+HOB_TOKENS = {
"PHIT" : "PHIT",
"MEMORY_ALLOCATION" : "MEMORY_ALLOCATION",
"LOAD_PEIM" : "LOAD_PEIM",
"RESOURCE_DESCRIPTOR" : "RESOURCE_DESCRIPTOR",
"FIRMWARE_VOLUME" : "FIRMWARE_VOLUME",
- "UNDEFINED" : "UNDEFINED"
+ "UNDEFINED" : "UNDEFINED"
}
##
@@ -223,22 +223,22 @@ PCD_DIRVER_TYPE_LIST = ["PEI_PCD_DRIVER", "DXE_PCD_DRIVER"]
#
BOOT_MODE_LIST = ["FULL",
"MINIMAL",
- "NO_CHANGE",
- "DIAGNOSTICS",
- "DEFAULT",
+ "NO_CHANGE",
+ "DIAGNOSTICS",
+ "DEFAULT",
"S2_RESUME",
- "S3_RESUME",
- "S4_RESUME",
- "S5_RESUME",
+ "S3_RESUME",
+ "S4_RESUME",
+ "S5_RESUME",
"FLASH_UPDATE",
- "RECOVERY_FULL",
- "RECOVERY_MINIMAL",
+ "RECOVERY_FULL",
+ "RECOVERY_MINIMAL",
"RECOVERY_NO_CHANGE",
- "RECOVERY_DIAGNOSTICS",
+ "RECOVERY_DIAGNOSTICS",
"RECOVERY_DEFAULT",
- "RECOVERY_S2_RESUME",
+ "RECOVERY_S2_RESUME",
"RECOVERY_S3_RESUME",
- "RECOVERY_S4_RESUME",
+ "RECOVERY_S4_RESUME",
"RECOVERY_S5_RESUME",
"RECOVERY_FLASH_UPDATE"]
@@ -251,9 +251,9 @@ EVENT_TYPE_LIST = ["EVENT_TYPE_PERIODIC_TIMER",
##
# Hob Type List Items
#
-HOB_TYPE_LIST = ["PHIT",
+HOB_TYPE_LIST = ["PHIT",
"MEMORY_ALLOCATION",
- "RESOURCE_DESCRIPTOR",
+ "RESOURCE_DESCRIPTOR",
"FIRMWARE_VOLUME",
"LOAD_PEIM"]
@@ -290,19 +290,19 @@ BINARY_FILE_TYPE_LIST = ["PE32", "PIC", "TE", "DXE_DEPEX", "VER", "UI", "COMPAT1
BINARY_FILE_TYPE_LIST_IN_UDP = \
["GUID", "FREEFORM",
"UEFI_IMAGE", "PE32", "PIC",
- "PEI_DEPEX",
+ "PEI_DEPEX",
"DXE_DEPEX",
"SMM_DEPEX",
"FV", "TE",
- "BIN", "VER", "UI"
+ "BIN", "VER", "UI"
]
SUBTYPE_GUID_BINARY_FILE_TYPE = "FREEFORM"
##
-# Possible values for COMPONENT_TYPE, and their descriptions, are listed in
-# the table,
-# "Component (module) Types." For each component, the BASE_NAME and
-# COMPONENT_TYPE
+# Possible values for COMPONENT_TYPE, and their descriptions, are listed in
+# the table,
+# "Component (module) Types." For each component, the BASE_NAME and
+# COMPONENT_TYPE
# are required. The COMPONENT_TYPE definition is case sensitive.
#
COMPONENT_TYPE_LIST = [
@@ -436,7 +436,7 @@ BINARY_FILE_TYPE_FV = 'FV'
BINARY_FILE_TYPE_UI_LIST = [BINARY_FILE_TYPE_UNI_UI,
BINARY_FILE_TYPE_SEC_UI,
BINARY_FILE_TYPE_UI
- ]
+ ]
BINARY_FILE_TYPE_VER_LIST = [BINARY_FILE_TYPE_UNI_VER,
BINARY_FILE_TYPE_SEC_VER,
BINARY_FILE_TYPE_VER
@@ -712,7 +712,7 @@ TAB_INF_DEFINES_FV_EXT = 'FV_EXT'
TAB_INF_DEFINES_SOURCE_FV = 'SOURCE_FV'
TAB_INF_DEFINES_PACKAGE = 'PACKAGE'
TAB_INF_DEFINES_VERSION_NUMBER = 'VERSION_NUMBER'
-TAB_INF_DEFINES_VERSION = 'VERSION'
+TAB_INF_DEFINES_VERSION = 'VERSION'
TAB_INF_DEFINES_VERSION_STRING = 'VERSION_STRING'
TAB_INF_DEFINES_PCD_IS_DRIVER = 'PCD_IS_DRIVER'
TAB_INF_DEFINES_TIANO_EDK1_FLASHMAP_H = 'TIANO_EDK1_FLASHMAP_H'
@@ -720,9 +720,9 @@ TAB_INF_DEFINES_ENTRY_POINT = 'ENTRY_POINT'
TAB_INF_DEFINES_UNLOAD_IMAGE = 'UNLOAD_IMAGE'
TAB_INF_DEFINES_CONSTRUCTOR = 'CONSTRUCTOR'
TAB_INF_DEFINES_DESTRUCTOR = 'DESTRUCTOR'
-TAB_INF_DEFINES_PCI_VENDOR_ID = 'PCI_VENDOR_ID'
-TAB_INF_DEFINES_PCI_DEVICE_ID = 'PCI_DEVICE_ID'
-TAB_INF_DEFINES_PCI_CLASS_CODE = 'PCI_CLASS_CODE'
+TAB_INF_DEFINES_PCI_VENDOR_ID = 'PCI_VENDOR_ID'
+TAB_INF_DEFINES_PCI_DEVICE_ID = 'PCI_DEVICE_ID'
+TAB_INF_DEFINES_PCI_CLASS_CODE = 'PCI_CLASS_CODE'
TAB_INF_DEFINES_PCI_REVISION = 'PCI_REVISION'
TAB_INF_DEFINES_PCI_COMPRESS = 'PCI_COMPRESS'
TAB_INF_DEFINES_DEFINE = 'DEFINE'
@@ -819,12 +819,12 @@ TAB_IF_EXIST = '!if exist'
TAB_UNKNOWN = 'UNKNOWN'
#
-# Header section (virtual section for abstract, description, copyright,
+# Header section (virtual section for abstract, description, copyright,
# license)
#
TAB_HEADER = 'Header'
TAB_HEADER_ABSTRACT = 'Abstract'
-TAB_HEADER_DESCRIPTION = 'Description'
+TAB_HEADER_DESCRIPTION = 'Description'
TAB_HEADER_COPYRIGHT = 'Copyright'
TAB_HEADER_LICENSE = 'License'
TAB_BINARY_HEADER_IDENTIFIER = 'BinaryHeader'
@@ -833,7 +833,7 @@ TAB_BINARY_HEADER_USERID = 'TianoCore'
#
# Build database path
#
-DATABASE_PATH = ":memory:"
+DATABASE_PATH = ":memory:"
#
# used by ECC
#
@@ -855,7 +855,7 @@ TAB_DEPENDENCY_EXPRESSION_FILE = "DEPENDENCY-EXPRESSION-FILE"
TAB_UNKNOWN_FILE = "UNKNOWN-TYPE-FILE"
TAB_DEFAULT_BINARY_FILE = "_BINARY_FILE_"
#
-# used to indicate the state of processing header comment section of dec,
+# used to indicate the state of processing header comment section of dec,
# inf files
#
HEADER_COMMENT_NOT_STARTED = -1
@@ -953,6 +953,6 @@ TOOL_FAMILY_LIST = ["MSFT",
TYPE_HOB_SECTION = 'HOB'
TYPE_EVENT_SECTION = 'EVENT'
-TYPE_BOOTMODE_SECTION = 'BOOTMODE'
+TYPE_BOOTMODE_SECTION = 'BOOTMODE'
PCD_ERR_CODE_MAX_SIZE = 4294967295
diff --git a/BaseTools/Source/Python/UPT/Library/ExpressionValidate.py b/BaseTools/Source/Python/UPT/Library/ExpressionValidate.py
index afa5b2407e..2e0253ab51 100644
--- a/BaseTools/Source/Python/UPT/Library/ExpressionValidate.py
+++ b/BaseTools/Source/Python/UPT/Library/ExpressionValidate.py
@@ -1,11 +1,11 @@
## @file
# This file is used to check PCD logical expression
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -47,7 +47,7 @@ def IsValidBareCString(String):
and (IntChar < 0x23 or IntChar > 0x7e):
return False
PreChar = Char
-
+
# Last char cannot be \ if PreChar is not \
if LastChar == '\\' and PreChar == LastChar:
return False
@@ -83,7 +83,7 @@ class _ExprBase:
self.Token = Token
self.Index = 0
self.Len = len(Token)
-
+
## SkipWhitespace
#
def SkipWhitespace(self):
@@ -91,14 +91,14 @@ class _ExprBase:
if Char not in ' \t':
break
self.Index += 1
-
+
## IsCurrentOp
#
- # @param OpList: option list
- #
+ # @param OpList: option list
+ #
def IsCurrentOp(self, OpList):
self.SkipWhitespace()
- LetterOp = ["EQ", "NE", "GE", "LE", "GT", "LT", "NOT", "and", "AND",
+ LetterOp = ["EQ", "NE", "GE", "LE", "GT", "LT", "NOT", "and", "AND",
"or", "OR", "XOR"]
OpMap = {
'|' : '|',
@@ -107,11 +107,11 @@ class _ExprBase:
'>' : '=',
'<' : '='
}
-
+
for Operator in OpList:
if not self.Token[self.Index:].startswith(Operator):
continue
-
+
self.Index += len(Operator)
Char = self.Token[self.Index : self.Index + 1]
@@ -119,36 +119,36 @@ class _ExprBase:
or (Operator in OpMap and OpMap[Operator] == Char):
self.Index -= len(Operator)
break
-
+
return True
-
+
return False
## _LogicalExpressionParser
#
# @param _ExprBase: _ExprBase object
-#
+#
class _LogicalExpressionParser(_ExprBase):
#
# STRINGITEM can only be logical field according to spec
#
STRINGITEM = -1
-
+
#
# Evaluate to True or False
#
LOGICAL = 0
REALLOGICAL = 2
-
+
#
# Just arithmetic expression
#
ARITH = 1
-
+
def __init__(self, Token):
_ExprBase.__init__(self, Token)
self.Parens = 0
-
+
def _CheckToken(self, MatchList):
for Match in MatchList:
if Match and Match.start() == 0:
@@ -156,7 +156,7 @@ class _LogicalExpressionParser(_ExprBase):
self.Token[self.Index:self.Index+Match.end()]
):
return False
-
+
self.Index += Match.end()
if self.Token[self.Index - 1] == '"':
return True
@@ -164,61 +164,61 @@ class _LogicalExpressionParser(_ExprBase):
self.Token[self.Index:self.Index+1].isalnum():
self.Index -= Match.end()
return False
-
+
Token = self.Token[self.Index - Match.end():self.Index]
if Token.strip() in ["EQ", "NE", "GE", "LE", "GT", "LT",
"NOT", "and", "AND", "or", "OR", "XOR"]:
self.Index -= Match.end()
return False
-
+
return True
-
+
return False
-
+
def IsAtomicNumVal(self):
#
# Hex number
#
Match1 = re.compile(self.HEX_PATTERN).match(self.Token[self.Index:])
-
+
#
# Number
#
Match2 = re.compile(self.INT_PATTERN).match(self.Token[self.Index:])
-
+
#
# Macro
#
Match3 = re.compile(self.MACRO_PATTERN).match(self.Token[self.Index:])
-
+
#
# PcdName
#
Match4 = re.compile(self.PCD_PATTERN).match(self.Token[self.Index:])
-
+
return self._CheckToken([Match1, Match2, Match3, Match4])
-
+
def IsAtomicItem(self):
#
# Macro
#
Match1 = re.compile(self.MACRO_PATTERN).match(self.Token[self.Index:])
-
+
#
# PcdName
#
Match2 = re.compile(self.PCD_PATTERN).match(self.Token[self.Index:])
-
+
#
# Quoted string
#
Match3 = re.compile(self.QUOTED_PATTERN).\
match(self.Token[self.Index:].replace('\\\\', '//').\
replace('\\\"', '\\\''))
-
+
return self._CheckToken([Match1, Match2, Match3])
-
+
## A || B
#
def LogicalExpression(self):
@@ -233,12 +233,12 @@ class _LogicalExpressionParser(_ExprBase):
raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
Ret = self.REALLOGICAL
return Ret
-
+
def SpecNot(self):
if self.IsCurrentOp(["NOT", "!", "not"]):
return self.SpecNot()
return self.Rel()
-
+
## A < B, A > B, A <= B, A >= B
#
def Rel(self):
@@ -252,7 +252,7 @@ class _LogicalExpressionParser(_ExprBase):
raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
Ret = self.REALLOGICAL
return Ret
-
+
## A + B, A - B
#
def Expr(self):
@@ -269,7 +269,7 @@ class _LogicalExpressionParser(_ExprBase):
return Ret
## Factor
- #
+ #
def Factor(self):
if self.IsCurrentOp(["("]):
self.Parens += 1
@@ -279,7 +279,7 @@ class _LogicalExpressionParser(_ExprBase):
(self.Token, self.Token[self.Index:]))
self.Parens -= 1
return Ret
-
+
if self.IsAtomicItem():
if self.Token[self.Index - 1] == '"':
return self.STRINGITEM
@@ -289,7 +289,7 @@ class _LogicalExpressionParser(_ExprBase):
else:
raise _ExprError(ST.ERR_EXPR_FACTOR % \
(self.Token[self.Index:], self.Token))
-
+
## IsValidLogicalExpression
#
def IsValidLogicalExpression(self):
@@ -319,7 +319,7 @@ class _ValidRangeExpressionParser(_ExprBase):
self.INT = 2
self.IsParenHappen = False
self.IsLogicalOpHappen = False
-
+
## IsValidRangeExpression
#
def IsValidRangeExpression(self):
@@ -330,12 +330,12 @@ class _ValidRangeExpressionParser(_ExprBase):
return False, ST.ERR_EXPR_RANGE % self.Token
except _ExprError as XExcept:
return False, XExcept.Error
-
+
self.SkipWhitespace()
if self.Index != self.Len:
return False, (ST.ERR_EXPR_RANGE % self.Token)
return True, ''
-
+
## RangeExpression
#
def RangeExpression(self):
@@ -346,22 +346,22 @@ class _ValidRangeExpressionParser(_ExprBase):
raise _ExprError(ST.ERR_PAREN_NOT_USED % self.Token)
self.IsParenHappen = False
Ret = self.Unary()
-
+
if self.IsCurrentOp(['XOR']):
Ret = self.Unary()
-
+
return Ret
-
+
## Unary
#
def Unary(self):
if self.IsCurrentOp(["NOT"]):
return self.Unary()
-
+
return self.ValidRange()
-
+
## ValidRange
- #
+ #
def ValidRange(self):
Ret = -1
if self.IsCurrentOp(["("]):
@@ -375,10 +375,10 @@ class _ValidRangeExpressionParser(_ExprBase):
raise _ExprError(ST.ERR_EXPR_RIGHT_PAREN % self.Token)
self.Parens -= 1
return Ret
-
+
if self.IsLogicalOpHappen:
raise _ExprError(ST.ERR_PAREN_NOT_USED % self.Token)
-
+
if self.IsCurrentOp(["LT", "GT", "LE", "GE", "EQ", "XOR"]):
IntMatch = \
re.compile(self.INT_PATTERN).match(self.Token[self.Index:])
@@ -417,7 +417,7 @@ class _ValidListExpressionParser(_ExprBase):
def __init__(self, Token):
_ExprBase.__init__(self, Token)
self.NUM = 1
-
+
def IsValidListExpression(self):
if self.Len == 0:
return False, ST.ERR_EXPR_LIST_EMPTY
@@ -432,7 +432,7 @@ class _ValidListExpressionParser(_ExprBase):
return False, (ST.ERR_EXPR_LIST % self.Token)
return True, ''
-
+
def ListExpression(self):
Ret = -1
self.SkipWhitespace()
@@ -444,7 +444,7 @@ class _ValidListExpressionParser(_ExprBase):
raise _ExprError(ST.ERR_EXPR_LIST % self.Token)
return Ret
-
+
## _StringTestParser
#
class _StringTestParser(_ExprBase):
@@ -452,7 +452,7 @@ class _StringTestParser(_ExprBase):
_ExprBase.__init__(self, Token)
## IsValidStringTest
- #
+ #
def IsValidStringTest(self):
if self.Len == 0:
return False, ST.ERR_EXPR_EMPTY
@@ -463,7 +463,7 @@ class _StringTestParser(_ExprBase):
return True, ''
## StringItem
- #
+ #
def StringItem(self):
Match1 = re.compile(self.QUOTED_PATTERN)\
.match(self.Token[self.Index:].replace('\\\\', '//')\
@@ -489,7 +489,7 @@ class _StringTestParser(_ExprBase):
(self.Token, self.Token[self.Index:]))
## StringTest
- #
+ #
def StringTest(self):
self.StringItem()
if not self.IsCurrentOp(["==", "EQ", "!=", "NE"]):
@@ -538,7 +538,7 @@ def IsValidRangeExpr(Token):
##
# Check syntax of value list expression token
#
-# @param Token: value list expression token
+# @param Token: value list expression token
#
def IsValidListExpr(Token):
return _ValidListExpressionParser(Token).IsValidListExpression()
@@ -562,7 +562,7 @@ def IsValidFeatureFlagExp(Token, Flag=False):
if not Valid:
Valid, Cause = IsValidLogicalExpr(Token, Flag)
if not Valid:
- return False, Cause
+ return False, Cause
return True, ""
if __name__ == '__main__':
@@ -570,4 +570,4 @@ if __name__ == '__main__':
print(_LogicalExpressionParser('gCrownBayTokenSpaceGuid.PcdPciDevice1BridgeAddressLE0').IsValidLogicalExpression())
-
+
diff --git a/BaseTools/Source/Python/UPT/Library/GlobalData.py b/BaseTools/Source/Python/UPT/Library/GlobalData.py
index 1ae2417c2f..40b17cf083 100644
--- a/BaseTools/Source/Python/UPT/Library/GlobalData.py
+++ b/BaseTools/Source/Python/UPT/Library/GlobalData.py
@@ -1,11 +1,11 @@
## @file
# This file is used to define common static strings and global data used by UPT
#
-# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -16,7 +16,7 @@ GlobalData
'''
#
-# The workspace directory
+# The workspace directory
#
gWORKSPACE = '.'
gPACKAGE_PATH = None
diff --git a/BaseTools/Source/Python/UPT/Library/Misc.py b/BaseTools/Source/Python/UPT/Library/Misc.py
index 28471d8123..8c2a6787f0 100644
--- a/BaseTools/Source/Python/UPT/Library/Misc.py
+++ b/BaseTools/Source/Python/UPT/Library/Misc.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -52,7 +52,7 @@ from Object.POM.CommonObject import TextObject
from Core.FileHook import __FileHookOpen__
from Common.MultipleWorkspace import MultipleWorkspace as mws
-## Convert GUID string in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx style to C
+## Convert GUID string in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx style to C
# structure style
#
# @param Guid: The GUID string
@@ -87,7 +87,7 @@ def CheckGuidRegFormat(GuidValue):
return False
-## Convert GUID string in C structure style to
+## Convert GUID string in C structure style to
# xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
#
# @param GuidValue: The GUID value in C structure format
@@ -151,12 +151,12 @@ def RemoveDirectory(Directory, Recursively=False):
## Store content in file
#
# This method is used to save file only when its content is changed. This is
-# quite useful for "make" system to decide what will be re-built and what
+# quite useful for "make" system to decide what will be re-built and what
# won't.
#
# @param File: The path of file
# @param Content: The new content of the file
-# @param IsBinaryFile: The flag indicating if the file is binary file
+# @param IsBinaryFile: The flag indicating if the file is binary file
# or not
#
def SaveFileOnChange(File, Content, IsBinaryFile=True):
@@ -216,7 +216,7 @@ def GetFiles(Root, SkipList=None, FullPath=True):
# @param FullPath: True if the returned file should be full path
# @param PrefixPath: the path that need to be added to the files found
# @return: the list of files found
-#
+#
def GetNonMetaDataFiles(Root, SkipList, FullPath, PrefixPath):
FileList = GetFiles(Root, SkipList, FullPath)
NewFileList = []
@@ -602,25 +602,25 @@ def GetWorkspace():
## Get relative path
#
# use full path and workspace to get relative path
-# the destination of this function is mainly to resolve the root path issue(like c: or c:\)
+# the destination of this function is mainly to resolve the root path issue(like c: or c:\)
#
# @param Fullpath: a string of fullpath
# @param Workspace: a string of workspace
#
def GetRelativePath(Fullpath, Workspace):
-
+
RelativePath = ''
if Workspace.endswith(os.sep):
RelativePath = Fullpath[Fullpath.upper().find(Workspace.upper())+len(Workspace):]
else:
RelativePath = Fullpath[Fullpath.upper().find(Workspace.upper())+len(Workspace)+1:]
-
+
return RelativePath
-
+
## Check whether all module types are in list
#
# check whether all module types (SUP_MODULE_LIST) are in list
-#
+#
# @param ModuleList: a list of ModuleType
#
def IsAllModuleList(ModuleList):
@@ -632,9 +632,9 @@ def IsAllModuleList(ModuleList):
return True
## Dictionary that use comment(GenericComment, TailComment) as value,
-# if a new comment which key already in the dic is inserted, then the
+# if a new comment which key already in the dic is inserted, then the
# comment will be merged.
-# Key is (Statement, SupArch), when TailComment is added, it will ident
+# Key is (Statement, SupArch), when TailComment is added, it will ident
# according to Statement
#
class MergeCommentDict(dict):
@@ -671,7 +671,7 @@ def GenDummyHelpTextObj():
# <Major> ::= (a-fA-F0-9){4}
# <Minor> ::= (a-fA-F0-9){4}
# <DecVersion> ::= (0-65535) ["." (0-99)]
-#
+#
# @param StringIn: The string contains version defined in INF file.
# It can be Decimal or Hex
#
@@ -764,7 +764,7 @@ def ConvertPath(Path):
## ConvertSpec
#
-# during install, convert the Spec string extract from UPD into INF allowable definition,
+# during install, convert the Spec string extract from UPD into INF allowable definition,
# the difference is period is allowed in the former (not the first letter) but not in the latter.
# return converted Spec string
#
@@ -787,7 +787,7 @@ def ConvertSpec(SpecStr):
# The rule is elements in List A are in List B and elements in List B are in List A.
#
# @param ListA, ListB Lists need to be judged.
-#
+#
# @return True ListA and ListB are identical
# @return False ListA and ListB are different with each other
#
@@ -808,10 +808,10 @@ def IsEqualList(ListA, ListB):
## ConvertArchList
#
# Convert item in ArchList if the start character is lower case.
-# In UDP spec, Arch is only allowed as: [A-Z]([a-zA-Z0-9])*
+# In UDP spec, Arch is only allowed as: [A-Z]([a-zA-Z0-9])*
#
# @param ArchList The ArchList need to be converted.
-#
+#
# @return NewList The ArchList been converted.
#
def ConvertArchList(ArchList):
@@ -835,7 +835,7 @@ def ConvertArchList(ArchList):
# If one line ends with a line extender, then it will be combined together with next line.
#
# @param LineList The LineList need to be processed.
-#
+#
# @return NewList The ArchList been processed.
#
def ProcessLineExtender(LineList):
@@ -854,11 +854,11 @@ def ProcessLineExtender(LineList):
## ProcessEdkComment
#
-# Process EDK style comment in LineList: c style /* */ comment or cpp style // comment
+# Process EDK style comment in LineList: c style /* */ comment or cpp style // comment
#
#
# @param LineList The LineList need to be processed.
-#
+#
# @return LineList The LineList been processed.
# @return FirstPos Where Edk comment is first found, -1 if not found
#
@@ -868,7 +868,7 @@ def ProcessEdkComment(LineList):
StartPos = -1
EndPos = -1
FirstPos = -1
-
+
while(Count < len(LineList)):
Line = LineList[Count].strip()
if Line.startswith("/*"):
@@ -886,7 +886,7 @@ def ProcessEdkComment(LineList):
FindEdkBlockComment = True
break
Count = Count + 1
-
+
if FindEdkBlockComment:
if FirstPos == -1:
FirstPos = StartPos
@@ -900,9 +900,9 @@ def ProcessEdkComment(LineList):
LineList[Count] = Line.replace("//", '#')
if FirstPos == -1:
FirstPos = Count
-
+
Count = Count + 1
-
+
return LineList, FirstPos
## GetLibInstanceInfo
@@ -994,13 +994,13 @@ def GetLibInstanceInfo(String, WorkSpace, LineNo):
## GetLocalValue
#
# Generate the local value for INF and DEC file. If Lang attribute not present, then use this value.
-# If present, and there is no element without the Lang attribute, and one of the elements has the rfc1766 code is
-# "en-x-tianocore", or "en-US" if "en-x-tianocore" was not found, or "en" if "en-US" was not found, or startswith 'en'
+# If present, and there is no element without the Lang attribute, and one of the elements has the rfc1766 code is
+# "en-x-tianocore", or "en-US" if "en-x-tianocore" was not found, or "en" if "en-US" was not found, or startswith 'en'
# if 'en' was not found, then use this value.
# If multiple entries of a tag exist which have the same language code, use the last entry.
#
# @param ValueList A list need to be processed.
-# @param UseFirstValue: True to use the first value, False to use the last value
+# @param UseFirstValue: True to use the first value, False to use the last value
#
# @return LocalValue
def GetLocalValue(ValueList, UseFirstValue=False):
@@ -1040,7 +1040,7 @@ def GetLocalValue(ValueList, UseFirstValue=False):
Value5 = Value
else:
Value5 = Value
-
+
if Value1:
return Value1
if Value2:
@@ -1051,7 +1051,7 @@ def GetLocalValue(ValueList, UseFirstValue=False):
return Value4
if Value5:
return Value5
-
+
return ''
@@ -1088,29 +1088,29 @@ def GetCharIndexOutStr(CommentCharacter, Line):
#
# Check the UNI file path
#
-# @param FilePath: The UNI file path
+# @param FilePath: The UNI file path
#
def ValidateUNIFilePath(Path):
Suffix = Path[Path.rfind(TAB_SPLIT):]
-
+
#
- # Check if the suffix is one of the '.uni', '.UNI', '.Uni'
+ # Check if the suffix is one of the '.uni', '.UNI', '.Uni'
#
if Suffix not in TAB_UNI_FILE_SUFFIXS:
- Logger.Error("Unicode File Parser",
- ToolError.FORMAT_INVALID,
- Message=ST.ERR_UNI_FILE_SUFFIX_WRONG,
- ExtraData=Path)
-
+ Logger.Error("Unicode File Parser",
+ ToolError.FORMAT_INVALID,
+ Message=ST.ERR_UNI_FILE_SUFFIX_WRONG,
+ ExtraData=Path)
+
#
# Check if '..' in the file name(without suffixe)
#
if (TAB_SPLIT + TAB_SPLIT) in Path:
- Logger.Error("Unicode File Parser",
- ToolError.FORMAT_INVALID,
- Message=ST.ERR_UNI_FILE_NAME_INVALID,
- ExtraData=Path)
-
+ Logger.Error("Unicode File Parser",
+ ToolError.FORMAT_INVALID,
+ Message=ST.ERR_UNI_FILE_NAME_INVALID,
+ ExtraData=Path)
+
#
# Check if the file name is valid according to the DEC and INF specification
#
@@ -1118,8 +1118,8 @@ def ValidateUNIFilePath(Path):
FileName = Path.replace(Suffix, '')
InvalidCh = re.sub(Pattern, '', FileName)
if InvalidCh:
- Logger.Error("Unicode File Parser",
- ToolError.FORMAT_INVALID,
- Message=ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID,
- ExtraData=Path)
+ Logger.Error("Unicode File Parser",
+ ToolError.FORMAT_INVALID,
+ Message=ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID,
+ ExtraData=Path)
diff --git a/BaseTools/Source/Python/UPT/Library/ParserValidate.py b/BaseTools/Source/Python/UPT/Library/ParserValidate.py
index dc93cedd91..31b9b68cd5 100644
--- a/BaseTools/Source/Python/UPT/Library/ParserValidate.py
+++ b/BaseTools/Source/Python/UPT/Library/ParserValidate.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -42,7 +42,7 @@ def __HexDigit(TempChar):
return True
else:
return False
-
+
## IsValidHex() method
#
# Whether char input is a Hex data.
@@ -59,7 +59,7 @@ def IsValidHex(HexStr):
return False
## Judge the input string is valid bool type or not.
-#
+#
# <TRUE> ::= {"TRUE"} {"true"} {"True"} {"0x1"} {"0x01"}
# <FALSE> ::= {"FALSE"} {"false"} {"False"} {"0x0"} {"0x00"}
# <BoolType> ::= {<TRUE>} {<FALSE>}
@@ -90,18 +90,18 @@ def IsValidBoolType(BoolString):
#
else:
return False
-
-## Is Valid Module Type List or not
-#
-# @param ModuleTypeList: A list contain ModuleType strings need to be
+
+## Is Valid Module Type List or not
+#
+# @param ModuleTypeList: A list contain ModuleType strings need to be
# judged.
#
def IsValidInfMoudleTypeList(ModuleTypeList):
for ModuleType in ModuleTypeList:
return IsValidInfMoudleType(ModuleType)
-## Is Valid Module Type or not
-#
+## Is Valid Module Type or not
+#
# @param ModuleType: A string contain ModuleType need to be judged.
#
def IsValidInfMoudleType(ModuleType):
@@ -110,8 +110,8 @@ def IsValidInfMoudleType(ModuleType):
else:
return False
-## Is Valid Component Type or not
-#
+## Is Valid Component Type or not
+#
# @param ComponentType: A string contain ComponentType need to be judged.
#
def IsValidInfComponentType(ComponentType):
@@ -124,7 +124,7 @@ def IsValidInfComponentType(ComponentType):
## Is valid Tool Family or not
#
# @param ToolFamily: A string contain Tool Family need to be judged.
-# Famlily := [A-Z]([a-zA-Z0-9])*
+# Famlily := [A-Z]([a-zA-Z0-9])*
#
def IsValidToolFamily(ToolFamily):
ReIsValieFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
@@ -148,13 +148,13 @@ def IsValidToolTagName(TagName):
return True
## Is valid arch or not
-#
+#
# @param Arch The arch string need to be validated
# <OA> ::= (a-zA-Z)(A-Za-z0-9){0,}
# <arch> ::= {"IA32"} {"X64"} {"IPF"} {"EBC"} {<OA>}
# {"common"}
# @param Arch: Input arch
-#
+#
def IsValidArch(Arch):
if Arch == 'common':
return True
@@ -164,55 +164,55 @@ def IsValidArch(Arch):
return True
## Is valid family or not
-#
+#
# <Family> ::= {"MSFT"} {"GCC"} {"INTEL"} {<Usr>} {"*"}
# <Usr> ::= [A-Z][A-Za-z0-9]{0,}
#
# @param family: The family string need to be validated
-#
+#
def IsValidFamily(Family):
Family = Family.strip()
if Family == '*':
return True
-
+
if Family == '':
return True
-
+
ReIsValidFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
if ReIsValidFamily.match(Family) is None:
return False
return True
## Is valid build option name or not
-#
+#
# @param BuildOptionName: The BuildOptionName string need to be validated
#
def IsValidBuildOptionName(BuildOptionName):
if not BuildOptionName:
return False
-
+
ToolOptionList = GetSplitValueList(BuildOptionName, '_', 4)
-
+
if len(ToolOptionList) != 5:
return False
-
+
ReIsValidBuildOption1 = re.compile(r"^\s*(\*)|([A-Z][a-zA-Z0-9]*)$")
ReIsValidBuildOption2 = re.compile(r"^\s*(\*)|([a-zA-Z][a-zA-Z0-9]*)$")
-
+
if ReIsValidBuildOption1.match(ToolOptionList[0]) is None:
return False
-
+
if ReIsValidBuildOption1.match(ToolOptionList[1]) is None:
return False
-
+
if ReIsValidBuildOption2.match(ToolOptionList[2]) is None:
return False
-
+
if ToolOptionList[3] == "*" and ToolOptionList[4] not in ['FAMILY', 'DLL', 'DPATH']:
return False
-
+
return True
-
+
## IsValidToken
#
# Check if pattern string matches total token
@@ -234,14 +234,14 @@ def IsValidToken(ReString, Token):
def IsValidPath(Path, Root):
Path = Path.strip()
OrigPath = Path.replace('\\', '/')
-
+
Path = os.path.normpath(Path).replace('\\', '/')
Root = os.path.normpath(Root).replace('\\', '/')
FullPath = mws.join(Root, Path)
-
+
if not os.path.exists(FullPath):
return False
-
+
#
# If Path is absolute path.
# It should be in Root.
@@ -263,16 +263,16 @@ def IsValidPath(Path, Root):
for Rel in ['/.', '/..', '/']:
if OrigPath.endswith(Rel):
return False
-
+
Path = Path.rstrip('/')
-
+
#
# Check relative path
#
for Word in Path.split('/'):
if not IsValidWord(Word):
return False
-
+
return True
## IsValidInstallPath
@@ -294,12 +294,12 @@ def IsValidInstallPath(Path):
return False
if Path.startswith('.'):
return False
-
+
if Path.find('..') != -1:
return False
-
+
return True
-
+
## IsValidCFormatGuid
#
@@ -309,14 +309,14 @@ def IsValidInstallPath(Path):
#
def IsValidCFormatGuid(Guid):
#
- # Valid: { 0xf0b11735, 0x87a0, 0x4193, {0xb2, 0x66, 0x53, 0x8c, 0x38,
+ # Valid: { 0xf0b11735, 0x87a0, 0x4193, {0xb2, 0x66, 0x53, 0x8c, 0x38,
# 0xaf, 0x48, 0xce }}
- # Invalid: { 0xf0b11735, 0x87a0, 0x4193, {0xb2, 0x66, 0x53, 0x8c, 0x38,
+ # Invalid: { 0xf0b11735, 0x87a0, 0x4193, {0xb2, 0x66, 0x53, 0x8c, 0x38,
# 0xaf, 0x48, 0xce }} 0x123
- # Invalid: { 0xf0b1 1735, 0x87a0, 0x4193, {0xb2, 0x66, 0x53, 0x8c, 0x38,
+ # Invalid: { 0xf0b1 1735, 0x87a0, 0x4193, {0xb2, 0x66, 0x53, 0x8c, 0x38,
# 0xaf, 0x48, 0xce }}
#
- List = ['{', 10, ',', 6, ',', 6, ',{', 4, ',', 4, ',', 4,
+ List = ['{', 10, ',', 6, ',', 6, ',{', 4, ',', 4, ',', 4,
',', 4, ',', 4, ',', 4, ',', 4, ',', 4, '}}']
Index = 0
Value = ''
@@ -337,14 +337,14 @@ def IsValidCFormatGuid(Guid):
if not Value.startswith('0x') and not Value.startswith('0X'):
return False
-
+
#
# Index may out of bound
#
if not isinstance(List[Index], type(1)) or \
len(Value) > List[Index] or len(Value) < 3:
return False
-
+
#
# Check if string can be converted to integer
# Throw exception if not
@@ -367,23 +367,23 @@ def IsValidCFormatGuid(Guid):
# Check whether the PCD type is valid
#
# @param PcdTypeString: The PcdType string need to be checked.
-#
+#
def IsValidPcdType(PcdTypeString):
if PcdTypeString.upper() in PCD_USAGE_TYPE_LIST_OF_MODULE:
return True
else:
return False
-
+
## IsValidWord
#
# Check whether the word is valid.
-# <Word> ::= (a-zA-Z0-9_)(a-zA-Z0-9_-){0,} Alphanumeric characters with
-# optional
-# dash "-" and/or underscore "_" characters. No whitespace
+# <Word> ::= (a-zA-Z0-9_)(a-zA-Z0-9_-){0,} Alphanumeric characters with
+# optional
+# dash "-" and/or underscore "_" characters. No whitespace
# characters are permitted.
-#
+#
# @param Word: The word string need to be checked.
-#
+#
def IsValidWord(Word):
if not Word:
return False
@@ -394,9 +394,9 @@ def IsValidWord(Word):
not Word[0] == '_' and \
not Word[0].isdigit():
return False
-
+
LastChar = ''
- for Char in Word[1:]:
+ for Char in Word[1:]:
if (not Char.isalpha()) and \
(not Char.isdigit()) and \
Char != '-' and \
@@ -406,82 +406,82 @@ def IsValidWord(Word):
if Char == '.' and LastChar == '.':
return False
LastChar = Char
-
+
return True
## IsValidSimpleWord
#
# Check whether the SimpleWord is valid.
-# <SimpleWord> ::= (a-zA-Z0-9)(a-zA-Z0-9_-){0,}
+# <SimpleWord> ::= (a-zA-Z0-9)(a-zA-Z0-9_-){0,}
# A word that cannot contain a period character.
-#
+#
# @param Word: The word string need to be checked.
-#
+#
def IsValidSimpleWord(Word):
ReIsValidSimpleWord = \
re.compile(r"^[0-9A-Za-z][0-9A-Za-z\-_]*$", re.DOTALL)
Word = Word.strip()
if not Word:
return False
-
+
if not ReIsValidSimpleWord.match(Word):
return False
-
+
return True
## IsValidDecVersion
#
# Check whether the decimal version is valid.
# <DecVersion> ::= (0-9){1,} ["." (0-9){1,}]
-#
+#
# @param Word: The word string need to be checked.
-#
+#
def IsValidDecVersion(Word):
if Word.find('.') > -1:
ReIsValidDecVersion = re.compile(r"[0-9]+\.?[0-9]+$")
else:
ReIsValidDecVersion = re.compile(r"[0-9]+$")
if ReIsValidDecVersion.match(Word) is None:
- return False
+ return False
return True
-
+
## IsValidHexVersion
#
# Check whether the hex version is valid.
# <HexVersion> ::= "0x" <Major> <Minor>
# <Major> ::= <HexDigit>{4}
# <Minor> ::= <HexDigit>{4}
-#
+#
# @param Word: The word string need to be checked.
-#
+#
def IsValidHexVersion(Word):
ReIsValidHexVersion = re.compile(r"[0][xX][0-9A-Fa-f]{8}$", re.DOTALL)
if ReIsValidHexVersion.match(Word) is None:
return False
-
+
return True
## IsValidBuildNumber
#
# Check whether the BUILD_NUMBER is valid.
# ["BUILD_NUMBER" "=" <Integer>{1,4} <EOL>]
-#
+#
# @param Word: The BUILD_NUMBER string need to be checked.
-#
+#
def IsValidBuildNumber(Word):
ReIsValieBuildNumber = re.compile(r"[0-9]{1,4}$", re.DOTALL)
if ReIsValieBuildNumber.match(Word) is None:
return False
-
+
return True
## IsValidDepex
#
# Check whether the Depex is valid.
-#
+#
# @param Word: The Depex string need to be checked.
-#
+#
def IsValidDepex(Word):
Index = Word.upper().find("PUSH")
if Index > -1:
@@ -490,12 +490,12 @@ def IsValidDepex(Word):
ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_\s\.]*$", re.DOTALL)
if ReIsValidCName.match(Word) is None:
return False
-
+
return True
## IsValidNormalizedString
#
-# Check
+# Check
# <NormalizedString> ::= <DblQuote> [{<Word>} {<Space>}]{1,} <DblQuote>
# <Space> ::= 0x20
#
@@ -504,31 +504,31 @@ def IsValidDepex(Word):
def IsValidNormalizedString(String):
if String == '':
return True
-
+
for Char in String:
if Char == '\t':
return False
-
+
StringList = GetSplitValueList(String, TAB_SPACE_SPLIT)
-
+
for Item in StringList:
if not Item:
continue
if not IsValidWord(Item):
return False
-
+
return True
## IsValidIdString
#
# Check whether the IdString is valid.
-#
+#
# @param IdString: The IdString need to be checked.
-#
+#
def IsValidIdString(String):
if IsValidSimpleWord(String.strip()):
return True
-
+
if String.strip().startswith('"') and \
String.strip().endswith('"'):
String = String[1:-1]
@@ -536,7 +536,7 @@ def IsValidIdString(String):
return True
if IsValidNormalizedString(String):
return True
-
+
return False
## IsValidVersionString
@@ -546,52 +546,52 @@ def IsValidIdString(String):
# <WhiteSpace> ::= {<Tab>} {<Space>}
# <Tab> ::= 0x09
# <Space> ::= 0x20
-# <AsciiChars> ::= (0x21 - 0x7E)
-#
+# <AsciiChars> ::= (0x21 - 0x7E)
+#
# @param VersionString: The VersionString need to be checked.
-#
+#
def IsValidVersionString(VersionString):
VersionString = VersionString.strip()
for Char in VersionString:
if not (Char >= 0x21 and Char <= 0x7E):
return False
-
+
return True
## IsValidPcdValue
#
# Check whether the PcdValue is valid.
-#
+#
# @param VersionString: The PcdValue need to be checked.
-#
+#
def IsValidPcdValue(PcdValue):
for Char in PcdValue:
if Char == '\n' or Char == '\t' or Char == '\f':
return False
-
+
#
# <Boolean>
#
if IsValidFeatureFlagExp(PcdValue, True)[0]:
return True
-
+
#
# <Number> ::= {<Integer>} {<HexNumber>}
# <Integer> ::= {(0-9)} {(1-9)(0-9){1,}}
# <HexNumber> ::= "0x" <HexDigit>{1,}
# <HexDigit> ::= (a-fA-F0-9)
- #
+ #
if IsValidHex(PcdValue):
return True
-
+
ReIsValidIntegerSingle = re.compile(r"^\s*[0-9]\s*$", re.DOTALL)
if ReIsValidIntegerSingle.match(PcdValue) is not None:
return True
-
- ReIsValidIntegerMulti = re.compile(r"^\s*[1-9][0-9]+\s*$", re.DOTALL)
+
+ ReIsValidIntegerMulti = re.compile(r"^\s*[1-9][0-9]+\s*$", re.DOTALL)
if ReIsValidIntegerMulti.match(PcdValue) is not None:
return True
-
+
#
# <StringVal> ::= {<StringType>} {<Array>} {"$(" <MACRO> ")"}
# <StringType> ::= {<UnicodeString>} {<CString>}
@@ -609,7 +609,7 @@ def IsValidPcdValue(PcdValue):
IsTrue = True
if IsTrue:
return IsTrue
-
+
#
# <Array> ::= {<CArray>} {<NList>} {<CFormatGUID>}
# <CArray> ::= "{" [<NList>] <CArray>{0,} "}"
@@ -619,44 +619,44 @@ def IsValidPcdValue(PcdValue):
#
if IsValidCFormatGuid(PcdValue):
return True
-
+
ReIsValidByteHex = re.compile(r"^\s*0x[0-9a-fA-F]{1,2}\s*$", re.DOTALL)
if PcdValue.strip().startswith('{') and PcdValue.strip().endswith('}') :
StringValue = PcdValue.strip().lstrip('{').rstrip('}')
ValueList = StringValue.split(',')
AllValidFlag = True
- for ValueItem in ValueList:
+ for ValueItem in ValueList:
if not ReIsValidByteHex.match(ValueItem.strip()):
AllValidFlag = False
-
+
if AllValidFlag:
return True
-
- #
+
+ #
# NList
#
AllValidFlag = True
ValueList = PcdValue.split(',')
- for ValueItem in ValueList:
+ for ValueItem in ValueList:
if not ReIsValidByteHex.match(ValueItem.strip()):
AllValidFlag = False
-
+
if AllValidFlag:
return True
-
+
return False
## IsValidCVariableName
#
# Check whether the PcdValue is valid.
-#
+#
# @param VersionString: The PcdValue need to be checked.
-#
+#
def IsValidCVariableName(CName):
ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_]*$", re.DOTALL)
if ReIsValidCName.match(CName) is None:
return False
-
+
return True
## IsValidIdentifier
@@ -671,7 +671,7 @@ def IsValidIdentifier(Ident):
ReIdent = re.compile(r"^[A-Za-z_][0-9A-Za-z_]*$", re.DOTALL)
if ReIdent.match(Ident) is None:
return False
-
+
return True
## IsValidDecVersionVal
@@ -682,10 +682,10 @@ def IsValidIdentifier(Ident):
#
def IsValidDecVersionVal(Ver):
ReVersion = re.compile(r"[0-9]+(\.[0-9]{1,2})$")
-
+
if ReVersion.match(Ver) is None:
return False
-
+
return True
@@ -699,7 +699,7 @@ def IsValidLibName(LibName):
ReLibName = re.compile("^[A-Z]+[a-zA-Z0-9]*$")
if not ReLibName.match(LibName):
return False
-
+
return True
# IsValidUserId
diff --git a/BaseTools/Source/Python/UPT/Library/Parsing.py b/BaseTools/Source/Python/UPT/Library/Parsing.py
index 22030e7587..5c4666399e 100644
--- a/BaseTools/Source/Python/UPT/Library/Parsing.py
+++ b/BaseTools/Source/Python/UPT/Library/Parsing.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define common parsing related functions used in parsing
+# This file is used to define common parsing related functions used in parsing
# INF/DEC/DSC process
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -74,7 +74,7 @@ def GetBuildOption(String, File, LineNo= -1):
# Get Library of Dsc as <LibraryClassKeyWord>|<LibraryInstance>
#
# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
-# @param ContainerFile: The file which describes the library class, used for
+# @param ContainerFile: The file which describes the library class, used for
# error report
#
def GetLibraryClass(Item, ContainerFile, WorkspaceDir, LineNo= -1):
@@ -99,7 +99,7 @@ def GetLibraryClass(Item, ContainerFile, WorkspaceDir, LineNo= -1):
# [|<TokenSpaceGuidCName>.<PcdCName>]
#
# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
-# @param ContainerFile: The file which describes the library class, used for
+# @param ContainerFile: The file which describes the library class, used for
# error report
#
def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo= -1):
@@ -148,7 +148,7 @@ def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo= -1):
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|
# <Value>[|<Type>|<MaximumDatumSize>]
-# @param ContainerFile: The file which describes the pcd, used for error
+# @param ContainerFile: The file which describes the pcd, used for error
# report
#
@@ -176,7 +176,7 @@ def GetPcd(Item, Type, ContainerFile, LineNo= -1):
#
# @param Item: String as <PcdTokenSpaceGuidCName>
# .<TokenCName>|TRUE/FALSE
-# @param ContainerFile: The file which describes the pcd, used for error
+# @param ContainerFile: The file which describes the pcd, used for error
# report
#
def GetFeatureFlagPcd(Item, Type, ContainerFile, LineNo= -1):
@@ -200,7 +200,7 @@ def GetFeatureFlagPcd(Item, Type, ContainerFile, LineNo= -1):
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|
# TRUE/FALSE
-# @param ContainerFile: The file which describes the pcd, used for error
+# @param ContainerFile: The file which describes the pcd, used for error
# report
#
def GetDynamicDefaultPcd(Item, Type, ContainerFile, LineNo= -1):
@@ -226,7 +226,7 @@ def GetDynamicDefaultPcd(Item, Type, ContainerFile, LineNo= -1):
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|
# TRUE/FALSE
-# @param ContainerFile: The file which describes the pcd, used for error
+# @param ContainerFile: The file which describes the pcd, used for error
# report
#
def GetDynamicHiiPcd(Item, Type, ContainerFile, LineNo= -1):
@@ -253,7 +253,7 @@ def GetDynamicHiiPcd(Item, Type, ContainerFile, LineNo= -1):
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>
# |TRUE/FALSE
-# @param ContainerFile: The file which describes the pcd, used for error
+# @param ContainerFile: The file which describes the pcd, used for error
# report
#
def GetDynamicVpdPcd(Item, Type, ContainerFile, LineNo= -1):
@@ -273,7 +273,7 @@ def GetDynamicVpdPcd(Item, Type, ContainerFile, LineNo= -1):
## GetComponent
#
# Parse block of the components defined in dsc file
-# Set KeyValues as [ ['component name', [lib1, lib2, lib3],
+# Set KeyValues as [ ['component name', [lib1, lib2, lib3],
# [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
#
# @param Lines: The content to be parsed
@@ -408,7 +408,7 @@ def GetExec(String):
## GetComponents
#
# Parse block of the components defined in dsc file
-# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3],
+# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3],
# [pcd1, pcd2, pcd3]], ...]
#
# @param Lines: The content to be parsed
@@ -531,7 +531,7 @@ def GetComponents(Lines, KeyValues, CommentCharacter):
#
# @param Item: String as <Filename>[|<Family>[|<TagName>[|<ToolCode>
# [|<PcdFeatureFlag>]]]]
-# @param ContainerFile: The file which describes the library class, used
+# @param ContainerFile: The file which describes the library class, used
# for error report
#
def GetSource(Item, ContainerFile, FileRelativePath, LineNo= -1):
@@ -556,7 +556,7 @@ def GetSource(Item, ContainerFile, FileRelativePath, LineNo= -1):
#
# @param Item: String as <Filename>[|<Family>[|<TagName>
# [|<ToolCode>[|<PcdFeatureFlag>]]]]
-# @param ContainerFile: The file which describes the library class,
+# @param ContainerFile: The file which describes the library class,
# used for error report
#
def GetBinary(Item, ContainerFile, LineNo= -1):
@@ -580,7 +580,7 @@ def GetBinary(Item, ContainerFile, LineNo= -1):
#
# @param Item: String as <GuidCName>[|<PcdFeatureFlag>]
# @param Type: Type of parsing string
-# @param ContainerFile: The file which describes the library class,
+# @param ContainerFile: The file which describes the library class,
# used for error report
#
def GetGuidsProtocolsPpisOfInf(Item):
@@ -594,7 +594,7 @@ def GetGuidsProtocolsPpisOfInf(Item):
#
# @param Item: String as <GuidCName>=<GuidValue>
# @param Type: Type of parsing string
-# @param ContainerFile: The file which describes the library class,
+# @param ContainerFile: The file which describes the library class,
# used for error report
#
def GetGuidsProtocolsPpisOfDec(Item, Type, ContainerFile, LineNo= -1):
@@ -625,7 +625,7 @@ def GetGuidsProtocolsPpisOfDec(Item, Type, ContainerFile, LineNo= -1):
#
# @param Item: String as <PackagePath>[|<PcdFeatureFlag>]
# @param Type: Type of parsing string
-# @param ContainerFile: The file which describes the library class,
+# @param ContainerFile: The file which describes the library class,
# used for error report
#
def GetPackage(Item, ContainerFile, FileRelativePath, LineNo= -1):
@@ -936,7 +936,7 @@ def MacroParser(Line, FileName, SectionType, FileLocalMacros):
# <Value> ::= {<NumVal>} {<Boolean>} {<AsciiString>} {<GUID>}
# {<CString>} {<UnicodeString>} {<CArray>}
#
- # The definition of <NumVal>, <PATH>, <Boolean>, <GUID>, <CString>,
+ # The definition of <NumVal>, <PATH>, <Boolean>, <GUID>, <CString>,
# <UnicodeString>, <CArray> are subset of <AsciiString>.
#
ReIsValidMacroValue = re.compile(r"^[\x20-\x7e]*$", re.DOTALL)
@@ -950,15 +950,15 @@ def MacroParser(Line, FileName, SectionType, FileLocalMacros):
return Name, Value
-## GenSection
+## GenSection
#
# generate section contents
#
-# @param SectionName: indicate the name of the section, details refer to
+# @param SectionName: indicate the name of the section, details refer to
# INF, DEC specs
-# @param SectionDict: section statement dict, key is SectionAttrs(arch,
-# moduletype or platform may exist as needed) list
-# seperated by space,
+# @param SectionDict: section statement dict, key is SectionAttrs(arch,
+# moduletype or platform may exist as needed) list
+# seperated by space,
# value is statement
#
def GenSection(SectionName, SectionDict, SplitArch=True, NeedBlankLine=False):
@@ -1004,10 +1004,10 @@ def GenSection(SectionName, SectionDict, SplitArch=True, NeedBlankLine=False):
return Content
## ConvertArchForInstall
-# if Arch.upper() is in "IA32", "X64", "IPF", and "EBC", it must be upper case. "common" must be lower case.
+# if Arch.upper() is in "IA32", "X64", "IPF", and "EBC", it must be upper case. "common" must be lower case.
# Anything else, the case must be preserved
#
-# @param Arch: the arch string that need to be converted, it should be stripped before pass in
+# @param Arch: the arch string that need to be converted, it should be stripped before pass in
# @return: the arch string that get converted
#
def ConvertArchForInstall(Arch):
diff --git a/BaseTools/Source/Python/UPT/Library/UniClassObject.py b/BaseTools/Source/Python/UPT/Library/UniClassObject.py
index a464cbf702..670cf3b4ee 100644
--- a/BaseTools/Source/Python/UPT/Library/UniClassObject.py
+++ b/BaseTools/Source/Python/UPT/Library/UniClassObject.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -148,9 +148,9 @@ def GetLanguageCode1766(LangName, File=None):
if LangName.isalpha() and gLANG_CONV_TABLE.get(LangName.lower()):
return LangName
else:
- EdkLogger.Error("Unicode File Parser",
+ EdkLogger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
- "Invalid RFC 1766 language code : %s" % LangName,
+ "Invalid RFC 1766 language code : %s" % LangName,
File)
elif length == 5:
if LangName[0:2].isalpha() and LangName[2] == '-':
@@ -167,11 +167,11 @@ def GetLanguageCode1766(LangName, File=None):
if Key == LangName[0:3].lower():
return Key
- EdkLogger.Error("Unicode File Parser",
+ EdkLogger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
- "Invalid RFC 4646 language code : %s" % LangName,
+ "Invalid RFC 4646 language code : %s" % LangName,
File)
-
+
## GetLanguageCode
#
# Check the language code read from .UNI file and convert RFC 1766 codes to RFC 4646 codes if appropriate
@@ -191,9 +191,9 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
return TempLangName
return LangName
else:
- EdkLogger.Error("Unicode File Parser",
+ EdkLogger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
- "Invalid RFC 1766 language code : %s" % LangName,
+ "Invalid RFC 1766 language code : %s" % LangName,
File)
if (LangName[0] == 'X' or LangName[0] == 'x') and LangName[1] == '-':
return LangName
@@ -212,9 +212,9 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None and LangName[3] == '-':
return LangName
- EdkLogger.Error("Unicode File Parser",
+ EdkLogger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
- "Invalid RFC 4646 language code : %s" % LangName,
+ "Invalid RFC 4646 language code : %s" % LangName,
File)
## FormatUniEntry
@@ -231,7 +231,7 @@ def FormatUniEntry(StrTokenName, TokenValueList, ContainerFile):
PreFormatLength = 40
if len(StrTokenName) > PreFormatLength:
PreFormatLength = len(StrTokenName) + 1
- for (Lang, Value) in TokenValueList:
+ for (Lang, Value) in TokenValueList:
if not Value or Lang == DT.TAB_LANGUAGE_EN_X:
continue
if Lang == '':
@@ -333,15 +333,15 @@ class UniFileClassObject(object):
except UnicodeError as Xstr:
FileIn = codecs.open(File.Path, mode='rb', encoding='utf_16_le').readlines()
except:
- EdkLogger.Error("Unicode File Parser",
- ToolError.FILE_OPEN_FAILURE,
+ EdkLogger.Error("Unicode File Parser",
+ ToolError.FILE_OPEN_FAILURE,
"File read failure: %s" % str(Xstr),
ExtraData=File)
LineNo = GetLineNo(FileIn, Line, False)
- EdkLogger.Error("Unicode File Parser",
+ EdkLogger.Error("Unicode File Parser",
ToolError.PARSER_ERROR,
- "Wrong language definition",
- ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line,
+ "Wrong language definition",
+ ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line,
File = File, Line = LineNo)
else:
LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File)
@@ -365,7 +365,7 @@ class UniFileClassObject(object):
if not IsLangInDef:
#
# The found STRING tokens will be added into new language string list
- # so that the unique STRING identifier is reserved for all languages in the package list.
+ # so that the unique STRING identifier is reserved for all languages in the package list.
#
FirstLangName = self.LanguageDef[0][0]
if LangName != FirstLangName:
@@ -375,10 +375,10 @@ class UniFileClassObject(object):
OtherLang = Item.UseOtherLangDef
else:
OtherLang = FirstLangName
- self.OrderedStringList[LangName].append (StringDefClassObject(Item.StringName,
- '',
- Item.Referenced,
- Item.Token,
+ self.OrderedStringList[LangName].append (StringDefClassObject(Item.StringName,
+ '',
+ Item.Referenced,
+ Item.Token,
OtherLang))
self.OrderedStringDict[LangName][Item.StringName] = len(self.OrderedStringList[LangName]) - 1
return True
@@ -395,7 +395,7 @@ class UniFileClassObject(object):
if Name != '':
MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
if MatchString is None or MatchString.end(0) != len(Name):
- EdkLogger.Error("Unicode File Parser",
+ EdkLogger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
'The string token name %s in UNI file %s must be upper case character.' %(Name, self.File))
LanguageList = Item.split(u'#language ')
@@ -406,7 +406,7 @@ class UniFileClassObject(object):
Language = LanguageList[IndexI].split()[0]
#.replace(u'\r\n', u'')
Value = \
- LanguageList[IndexI][LanguageList[IndexI].find(u'\"') + len(u'\"') : LanguageList[IndexI].rfind(u'\"')]
+ LanguageList[IndexI][LanguageList[IndexI].find(u'\"') + len(u'\"') : LanguageList[IndexI].rfind(u'\"')]
Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
self.AddStringToList(Name, Language, Value)
@@ -424,7 +424,7 @@ class UniFileClassObject(object):
#
def PreProcess(self, File, IsIncludeFile=False):
if not os.path.exists(File.Path) or not os.path.isfile(File.Path):
- EdkLogger.Error("Unicode File Parser",
+ EdkLogger.Error("Unicode File Parser",
ToolError.FILE_NOT_FOUND,
ExtraData=File.Path)
@@ -443,8 +443,8 @@ class UniFileClassObject(object):
FileIn = codecs.open(File.Path, mode='rb', encoding='utf_16_le').readlines()
except:
EdkLogger.Error("Unicode File Parser", ToolError.FILE_OPEN_FAILURE, ExtraData=File.Path)
-
-
+
+
#
# get the file header
#
@@ -467,7 +467,7 @@ class UniFileClassObject(object):
if Line.startswith(DT.TAB_COMMENT_EDK1_SPLIT) and HeaderStart and not HeaderEnd and FirstGenHeader:
self.UniFileHeader += Line + '\r\n'
continue
-
+
#
# Use unique identifier
#
@@ -486,7 +486,7 @@ class UniFileClassObject(object):
Line = Line.strip()
#
# Ignore comment line and empty line
- #
+ #
if Line == u'' or Line.startswith(u'//'):
#
# Change the single line String entry flag status
@@ -528,7 +528,7 @@ class UniFileClassObject(object):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
else:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
-
+
Line = Line.replace(UNICODE_WIDE_CHAR, WIDE_CHAR)
Line = Line.replace(UNICODE_NARROW_CHAR, NARROW_CHAR)
Line = Line.replace(UNICODE_NON_BREAKING_CHAR, NON_BREAKING_CHAR)
@@ -556,16 +556,16 @@ class UniFileClassObject(object):
#
if Line.startswith(u'#string') and Line.find(u'#language') == -1:
MultiLineFeedExits = True
-
+
if Line.startswith(u'#string') and Line.find(u'#language') > 0 and Line.find(u'"') < 0:
MultiLineFeedExits = True
-
+
#
# Between Language entry and String entry can not contain line feed
#
if Line.startswith(u'#language') and len(Line.split()) == 2:
MultiLineFeedExits = True
-
+
#
# Between two String entry, can not contain line feed
#
@@ -588,7 +588,7 @@ class UniFileClassObject(object):
StringEntryExistsFlag = 0
Lines.append(Line)
-
+
#
# Convert string def format as below
#
@@ -601,11 +601,11 @@ class UniFileClassObject(object):
# "Mi segunda secuencia 1"
# "Mi segunda secuencia 2"
#
-
+
if not IsIncludeFile and not Lines:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_NO_SECTION_EXIST, \
- ExtraData=File.Path)
+ ExtraData=File.Path)
NewLines = []
StrName = u''
@@ -615,7 +615,7 @@ class UniFileClassObject(object):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_STRNAME_FORMAT_ERROR % StrName.split()[1], \
ExtraData=File.Path)
-
+
if StrName and len(StrName.split()[1].split(DT.TAB_UNDERLINE_SPLIT)) == 4:
StringTokenList = StrName.split()[1].split(DT.TAB_UNDERLINE_SPLIT)
if (StringTokenList[3].upper() in [DT.TAB_STR_TOKENPROMPT, DT.TAB_STR_TOKENHELP] and \
@@ -624,19 +624,19 @@ class UniFileClassObject(object):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_STRTOKEN_FORMAT_ERROR % StrName.split()[1], \
ExtraData=File.Path)
-
+
if Line.count(u'#language') > 1:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_SEP_LANGENTRY_LINE % Line, \
- ExtraData=File.Path)
-
+ ExtraData=File.Path)
+
if Line.startswith(u'//'):
continue
elif Line.startswith(u'#langdef'):
if len(Line.split()) == 2:
NewLines.append(Line)
continue
- elif len(Line.split()) > 2 and Line.find(u'"') > 0:
+ elif len(Line.split()) > 2 and Line.find(u'"') > 0:
NewLines.append(Line[:Line.find(u'"')].strip())
NewLines.append(Line[Line.find(u'"'):])
else:
@@ -659,10 +659,10 @@ class UniFileClassObject(object):
if Line[Line.find(u'#language')-1] != ' ' or \
Line[Line.find(u'#language')+len(u'#language')] != u' ':
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
-
+
if Line.find(u'"') > 0:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
-
+
StrName = Line.split()[0] + u' ' + Line.split()[1]
if StrName:
if StrName.split()[1] not in ExistStrNameList:
@@ -684,11 +684,11 @@ class UniFileClassObject(object):
if Line[Line.find(u'#language')-1] != u' ' or \
Line[Line.find(u'#language')+len(u'#language')] != u' ':
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
-
+
if Line[Line.find(u'"')-1] != u' ':
- EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
-
- StrName = Line.split()[0] + u' ' + Line.split()[1]
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+
+ StrName = Line.split()[0] + u' ' + Line.split()[1]
if StrName:
if StrName.split()[1] not in ExistStrNameList:
ExistStrNameList.append(StrName.split()[1].strip())
@@ -698,11 +698,11 @@ class UniFileClassObject(object):
DT.TAB_DEC_BINARY_ABSTRACT, DT.TAB_DEC_BINARY_DESCRIPTION]:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_MULTI_ENTRY_EXIST % StrName.split()[1], \
- ExtraData=File.Path)
+ ExtraData=File.Path)
if IsIncludeFile:
if StrName not in NewLines:
NewLines.append((Line[:Line.find(u'#language')]).strip())
- else:
+ else:
NewLines.append((Line[:Line.find(u'#language')]).strip())
NewLines.append((Line[Line.find(u'#language'):Line.find(u'"')]).strip())
NewLines.append((Line[Line.find(u'"'):]).strip())
@@ -733,17 +733,17 @@ class UniFileClassObject(object):
else:
print(Line)
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
-
+
if StrName and not StrName.split()[1].startswith(u'STR_'):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_STRNAME_FORMAT_ERROR % StrName.split()[1], \
- ExtraData=File.Path)
-
+ ExtraData=File.Path)
+
if StrName and not NewLines:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNI_MISS_LANGENTRY % StrName, \
ExtraData=File.Path)
-
+
#
# Check Abstract, Description, BinaryAbstract and BinaryDescription order,
# should be Abstract, Description, BinaryAbstract, BinaryDesctiption
@@ -762,7 +762,7 @@ class UniFileClassObject(object):
BinaryDescriptionPosition = ExistStrNameList.index(StrName)
else:
DescriptionPosition = ExistStrNameList.index(StrName)
-
+
OrderList = sorted([AbstractPosition, DescriptionPosition])
BinaryOrderList = sorted([BinaryAbstractPosition, BinaryDescriptionPosition])
Min = OrderList[0]
@@ -774,25 +774,25 @@ class UniFileClassObject(object):
BinaryMax > Max):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_ENTRY_ORDER_WRONG, \
- ExtraData=File.Path)
+ ExtraData=File.Path)
elif BinaryAbstractPosition > -1:
if not(BinaryAbstractPosition > Max):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_ENTRY_ORDER_WRONG, \
- ExtraData=File.Path)
-
+ ExtraData=File.Path)
+
if DescriptionPosition > -1:
if not(DescriptionPosition == Max and AbstractPosition == Min and \
DescriptionPosition > AbstractPosition):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_ENTRY_ORDER_WRONG, \
- ExtraData=File.Path)
-
+ ExtraData=File.Path)
+
if not self.UniFileHeader:
- EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
Message = ST.ERR_NO_SOURCE_HEADER,
ExtraData=File.Path)
-
+
return NewLines
#
@@ -800,13 +800,13 @@ class UniFileClassObject(object):
#
def LoadUniFile(self, File = None):
if File is None:
- EdkLogger.Error("Unicode File Parser",
- ToolError.PARSER_ERROR,
- Message='No unicode file is given',
+ EdkLogger.Error("Unicode File Parser",
+ ToolError.PARSER_ERROR,
+ Message='No unicode file is given',
ExtraData=File.Path)
-
+
self.File = File
-
+
#
# Process special char in file
#
@@ -849,10 +849,10 @@ class UniFileClassObject(object):
SecondLine.find(u'#string ') < 0 and SecondLine.find(u'#language ') >= 0 and \
ThirdLine.find(u'#string ') < 0 and ThirdLine.find(u'#language ') < 0:
if Line.find('"') > 0 or SecondLine.find('"') > 0:
- EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
Message=ST.ERR_UNIPARSE_DBLQUOTE_UNMATCHED,
ExtraData=File.Path)
-
+
Name = Line[Line.find(u'#string ') + len(u'#string ') : ].strip(' ')
Language = SecondLine[SecondLine.find(u'#language ') + len(u'#language ') : ].strip(' ')
for IndexJ in range(IndexI + 2, len(Lines)):
@@ -894,11 +894,11 @@ class UniFileClassObject(object):
for LangNameItem in self.LanguageDef:
if Language == LangNameItem[0]:
break
-
+
if Language not in self.OrderedStringList:
self.OrderedStringList[Language] = []
self.OrderedStringDict[Language] = {}
-
+
IsAdded = True
if Name in self.OrderedStringDict[Language]:
IsAdded = False
@@ -906,38 +906,38 @@ class UniFileClassObject(object):
ItemIndexInList = self.OrderedStringDict[Language][Name]
Item = self.OrderedStringList[Language][ItemIndexInList]
Item.UpdateValue(Value)
- Item.UseOtherLangDef = ''
+ Item.UseOtherLangDef = ''
if IsAdded:
Token = len(self.OrderedStringList[Language])
if Index == -1:
- self.OrderedStringList[Language].append(StringDefClassObject(Name,
- Value,
- Referenced,
- Token,
+ self.OrderedStringList[Language].append(StringDefClassObject(Name,
+ Value,
+ Referenced,
+ Token,
UseOtherLangDef))
self.OrderedStringDict[Language][Name] = Token
for LangName in self.LanguageDef:
#
# New STRING token will be added into all language string lists.
- # so that the unique STRING identifier is reserved for all languages in the package list.
+ # so that the unique STRING identifier is reserved for all languages in the package list.
#
if LangName[0] != Language:
if UseOtherLangDef != '':
OtherLangDef = UseOtherLangDef
else:
OtherLangDef = Language
- self.OrderedStringList[LangName[0]].append(StringDefClassObject(Name,
- '',
- Referenced,
- Token,
+ self.OrderedStringList[LangName[0]].append(StringDefClassObject(Name,
+ '',
+ Referenced,
+ Token,
OtherLangDef))
self.OrderedStringDict[LangName[0]][Name] = len(self.OrderedStringList[LangName[0]]) - 1
else:
- self.OrderedStringList[Language].insert(Index, StringDefClassObject(Name,
- Value,
- Referenced,
- Token,
+ self.OrderedStringList[Language].insert(Index, StringDefClassObject(Name,
+ Value,
+ Referenced,
+ Token,
UseOtherLangDef))
self.OrderedStringDict[Language][Name] = Index
@@ -1029,16 +1029,16 @@ class UniFileClassObject(object):
print(Item)
for Member in self.OrderedStringList[Item]:
print(str(Member))
-
+
#
- # Read content from '!include' UNI file
+ # Read content from '!include' UNI file
#
def ReadIncludeUNIfile(self, FilaPath):
if self.File:
pass
-
+
if not os.path.exists(FilaPath) or not os.path.isfile(FilaPath):
- EdkLogger.Error("Unicode File Parser",
+ EdkLogger.Error("Unicode File Parser",
ToolError.FILE_NOT_FOUND,
ExtraData=FilaPath)
try:
diff --git a/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py b/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py
index dbaee678af..ee158f33d9 100644
--- a/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py
+++ b/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py
@@ -2,11 +2,11 @@
# This is an XML API that uses a syntax similar to XPath, but it is written in
# standard python so that no extra python packages are required to use it.
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -141,8 +141,8 @@ def XmlElement(Dom, String):
## Get a single XML element using XPath style syntax.
#
# Similar with XmlElement, but do not strip all the leading and tailing space
-# and newline, instead just remove the newline and spaces introduced by
-# toprettyxml()
+# and newline, instead just remove the newline and spaces introduced by
+# toprettyxml()
#
# @param Dom The root XML DOM object.
# @param Strin A XPath style path.
diff --git a/BaseTools/Source/Python/UPT/Library/Xml/__init__.py b/BaseTools/Source/Python/UPT/Library/Xml/__init__.py
index f09eece5fb..e8283c03cb 100644
--- a/BaseTools/Source/Python/UPT/Library/Xml/__init__.py
+++ b/BaseTools/Source/Python/UPT/Library/Xml/__init__.py
@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
Xml
-''' \ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/UPT/Library/__init__.py b/BaseTools/Source/Python/UPT/Library/__init__.py
index 6a98cd80a3..3d52523b97 100644
--- a/BaseTools/Source/Python/UPT/Library/__init__.py
+++ b/BaseTools/Source/Python/UPT/Library/__init__.py
@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
Library
-''' \ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/UPT/Logger/Log.py b/BaseTools/Source/Python/UPT/Logger/Log.py
index ae06a1ae2a..e8c31f0072 100644
--- a/BaseTools/Source/Python/UPT/Logger/Log.py
+++ b/BaseTools/Source/Python/UPT/Logger/Log.py
@@ -1,11 +1,11 @@
## @file
# This file implements the log mechanism for Python tools.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -197,8 +197,8 @@ def Warn(ToolName, Message, File=None, Line=None, ExtraData=None):
## Log ERROR message
#
-# Once an error messages is logged, the tool's execution will be broken by
-# raising an execption. If you don't want to break the execution later, you
+# Once an error messages is logged, the tool's execution will be broken by
+# raising an execption. If you don't want to break the execution later, you
# can give "RaiseError" with "False" value.
#
# @param ToolName The name of the tool. If not given, the name of caller
@@ -248,13 +248,13 @@ def Error(ToolName, ErrorCode, Message=None, File=None, Line=None, \
if RaiseError:
raise FatalError(ErrorCode)
-
+
## Initialize log system
#
def Initialize():
#
- # Since we use different format to log different levels of message into
- # different place (stdout or stderr), we have to use different "Logger"
+ # Since we use different format to log different levels of message into
+ # different place (stdout or stderr), we have to use different "Logger"
# objects to do this.
#
# For DEBUG level (All DEBUG_0~9 are applicable)
diff --git a/BaseTools/Source/Python/UPT/Logger/StringTable.py b/BaseTools/Source/Python/UPT/Logger/StringTable.py
index 83ae0ae2f3..c1c7732b40 100644
--- a/BaseTools/Source/Python/UPT/Logger/StringTable.py
+++ b/BaseTools/Source/Python/UPT/Logger/StringTable.py
@@ -1,11 +1,11 @@
## @file
# This file is used to define strings used in the UPT tool
#
-# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -118,7 +118,7 @@ ERR_INF_PARSER_DEPEX_SECTION_MODULE_TYPE_ERROR = \
ERR_INF_PARSER_DEPEX_SECTION_CONTENT_MISSING = \
_("Missing content in: %s")
ERR_INF_PARSER_DEPEX_SECTION_CONTENT_ERROR = \
- _("The [Depex] section contains invalid content: %s")
+ _("The [Depex] section contains invalid content: %s")
ERR_INF_PARSER_DEPEX_SECTION_SEC_TYPE_ERROR = \
_("The format is incorrect. The section type keyword of the content in the"
" [Depex] section is only for 'PEI_DEPEX', 'DXE_DEPEX', 'SMM_DEPEX', "
@@ -140,11 +140,11 @@ ERR_INF_PARSER_SOURCES_SECTION_CONTENT_ERROR = \
ERR_INF_PARSER_PCD_SECTION_TYPE_ERROR = \
_("The PCD section type is incorrect. The value should be this list: %s")
ERR_INF_PARSER_PCD_SECTION_CONTENT_ERROR = \
- _("PcdName format invalid."
+ _("PcdName format invalid."
"Should like following: PcdName | Value | FeatureFlag.")
ERR_INF_PARSER_PCD_NAME_FORMAT_ERROR = \
- _("Format invalid."
- "Should like following: <TokenSpaceGuidCName>.<PcdCName> ")
+ _("Format invalid."
+ "Should like following: <TokenSpaceGuidCName>.<PcdCName> ")
ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR = \
_("The format is incorrect. "
"It should be formated as follows: CName | FeatureFlag.")
@@ -181,7 +181,7 @@ ERR_INF_PARSER_PCD_CVAR_GUID = \
ERR_INF_PARSER_PCD_CVAR_PCDCNAME = \
_("PcdCName must be valid C variable format.")
ERR_INF_PARSER_PCD_VALUE_INVALID = \
- _("The PCD value is incorrect. It must be corrected before continuing.")
+ _("The PCD value is incorrect. It must be corrected before continuing.")
ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID = \
_("Incorrect feature flag expression: %s")
ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING = \
@@ -191,7 +191,7 @@ ERR_INF_PARSER_INVALID_CNAME = \
ERR_INF_PARSER_CNAME_MISSING = \
_("Missing CName. Specify a valid C variable name.")
ERR_INF_PARSER_DEFINE_SECTION_KEYWORD_INVALID = \
- _("The Define section contains an invalid keyword: \"%s\"."
+ _("The Define section contains an invalid keyword: \"%s\"."
"It must be corrected before continuing.")
ERR_INF_PARSER_FILE_MISS_DEFINE = \
_("The following file listed in the module "
@@ -206,7 +206,7 @@ ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF = _("EDKI INF is not supported")
ERR_INF_PARSER_EDKI_COMMENT_IN_EDKII = _("The EDKI style comment is not supported in EDKII modules")
ERR_INF_PARSER_FEATUREPCD_USAGE_INVALID = _("The usage for FeaturePcd can only"
- " be type of \"CONSUMES\".")
+ " be type of \"CONSUMES\".")
ERR_INF_PARSER_DEFINE_ITEM_NO_NAME = _("No name specified")
ERR_INF_PARSER_DEFINE_ITEM_NO_VALUE = _("No value specified")
@@ -221,9 +221,9 @@ ERR_INF_NO_PKG_DEPENDENCY_INFO = _("There are no packages defined that use the A
#
ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC = \
_('"%s" is redefined in its dependent DEC files')
-ERR_INF_PARSER_ITEM_DUPLICATE = _("%s define duplicated! "
+ERR_INF_PARSER_ITEM_DUPLICATE = _("%s define duplicated! "
"It must be corrected before continuing.")
-ERR_INF_PARSER_ITEM_DUPLICATE_COMMON = _("%s define duplicated! Item listed"
+ERR_INF_PARSER_ITEM_DUPLICATE_COMMON = _("%s define duplicated! Item listed"
"in an architectural section must not be listed in the common architectural"
"section.It must be corrected before continuing.")
ERR_INF_PARSER_UE_SECTION_DUPLICATE_ERROR = \
@@ -237,11 +237,11 @@ _("The name 'NULL' for LibraryClass is a reserved word."
ERR_GLOBAL_MARCO_INVALID = \
_("Using global MACRO in INF/DEC is not permitted: %s . "
-"It must be corrected before continuing.")
+"It must be corrected before continuing.")
ERR_MARCO_DEFINITION_MISS_ERROR = \
_("MACRO expand incorrectly, can not find the MACRO definition. "
-"It must be corrected before continuing.")
+"It must be corrected before continuing.")
#
# AsBuilt related
@@ -370,7 +370,7 @@ MSG_RECOVER_FAIL = _('Recovery failed.')
#
ERR_DEPENDENCY_NOT_MATCH = _(
- "Module %s's dependency on package %s (GUID %s Version %s) "
+ "Module %s's dependency on package %s (GUID %s Version %s) "
"cannot be satisfied")
ERR_MODULE_NOT_INSTALLED = _(
"This module is not installed in the workspace: %s\n")
@@ -475,7 +475,7 @@ ERR_FILELIST_EXIST = _(
ERR_COPYRIGHT_CONTENT = _(
"The copyright content must contain the word \"Copyright\" (case insensitive).")
ERR_WRONG_FILELIST_FORMAT = \
-_('File list format is incorrect.'
+_('File list format is incorrect.'
'The correct format is: filename|key=value[|key=value]')
ERR_FILELIST_ATTR = _(
"The value of attribute \"%s\" includes illegal character.")
@@ -593,7 +593,7 @@ _("There are same entries : %s in the UNI file, every kind of entry should be on
ERR_UNIPARSE_ENTRY_ORDER_WRONG = \
_("The string entry order in UNI file should be <AbstractStrings>, <DescriptionStrings>, \
<BinaryAbstractStrings>, <BinaryDescriptionStrings>.")
-ERR_UNIPARSE_STRTOKEN_FORMAT_ERROR = _("The String Token Type %s must be one of the '_PROMPT', '_HELP' and '_ERR_'.")
+ERR_UNIPARSE_STRTOKEN_FORMAT_ERROR = _("The String Token Type %s must be one of the '_PROMPT', '_HELP' and '_ERR_'.")
ERR_UNIPARSE_LINEFEED_UNDER_EXIST = _("Line feed should not exist under this line: %s.")
ERR_UNIPARSE_LINEFEED_UP_EXIST = _("Line feed should not exist up this line: %s.")
ERR_UNI_MISS_STRING_ENTRY = _("String entry missed in this Entry, %s.")
@@ -672,7 +672,7 @@ _("Library class format error, must be Libraryclass|Headerpath.")
ERR_DECPARSE_LIBCLASS_EMPTY = \
_("Class name or file name must not be empty.")
ERR_DECPARSE_LIBCLASS_LIB = \
-_("Class name format error, must start with upper case letter followed with "
+_("Class name format error, must start with upper case letter followed with "
"zero or more alphanumeric characters.")
ERR_DECPARSE_LIBCLASS_PATH_EXT = _("File name must be end with .h.")
ERR_DECPARSE_LIBCLASS_PATH_DOT = _("Path must not include '..'.")
@@ -695,7 +695,7 @@ _("Incorrect value [%s] of type [%s]. Value must be printable and in the "
"form of{...} for array, or ""..."" for string, or L""..."""
"for unicode string.")
ERR_DECPARSE_PCD_VALUE_EMPTY = \
-_("Pcd value can not be empty.")
+_("Pcd value can not be empty.")
ERR_DECPARSE_PCD_BOOL = \
_("Invalid value [%s] of type [%s]; must be expression, TRUE, FALSE, 0 or 1.")
ERR_DECPARSE_PCD_INT = _("Incorrect value [%s] of type [%s]."\
@@ -719,14 +719,14 @@ _("No GUID name specified, must be <CName> = <GuidValueInCFormat>.")
ERR_DECPARSE_CGUID_GUID = \
_("No GUID value specified, must be <CName> = <GuidValueInCFormat>.")
ERR_DECPARSE_CGUID_GUIDFORMAT = \
-_("Incorrect GUID value format, must be <GuidValueInCFormat:"
+_("Incorrect GUID value format, must be <GuidValueInCFormat:"
"{8,4,4,{2,2,2,2,2,2,2,2}}>.")
ERR_DECPARSE_CGUID_NOT_FOUND = _("Unable to find the GUID value of this GUID CName : '%s'.")
ERR_DECPARSE_FILEOPEN = _("Unable to open: [%s].")
ERR_DECPARSE_SECTION_EMPTY = _("Empty sections are not allowed.")
ERR_DECPARSE_SECTION_UE = _("Incorrect UserExtentions format. "
"Must be UserExtenxions.UserId.IdString[.Arch]+.")
-ERR_DECPARSE_SECTION_UE_USERID = _("Invalid UserId, must be underscore"
+ERR_DECPARSE_SECTION_UE_USERID = _("Invalid UserId, must be underscore"
"or alphanumeric characters.")
ERR_DECPARSE_SECTION_UE_IDSTRING = \
_("Incorrect IdString, must be \" ... \".")
@@ -805,7 +805,7 @@ WRN_INF_PARSER_MODULE_INVALID_EVENT_TYPE = \
WRN_INF_PARSER_MODULE_INVALID_BOOTMODE_TYPE = \
_("This is an incorrect BOOTMODE type: %s")
WRN_INVALID_MODULE_TYPE = \
- _("This is an incorrect Module type: %s")
+ _("This is an incorrect Module type: %s")
WRN_MODULE_PARSE_FAILED = \
_("Parsing of this module did not complete correctly: %s.")
WRN_EDK1_INF_FOUND = \
@@ -826,7 +826,7 @@ HLP_PRINT_DEBUG_INFO = _(
"Print DEBUG statements, where DEBUG_LEVEL is 0-9")
HLP_PRINT_INFORMATIONAL_STATEMENT = _("Print informational statements")
HLP_RETURN_NO_DISPLAY = _(
- "Returns only the exit code, informational and error messages are"
+ "Returns only the exit code, informational and error messages are"
" not displayed")
HLP_RETURN_AND_DISPLAY = _(
"Returns the exit code and displays error messages only")
diff --git a/BaseTools/Source/Python/UPT/Logger/ToolError.py b/BaseTools/Source/Python/UPT/Logger/ToolError.py
index 5065b370a3..7f7df9c6d5 100644
--- a/BaseTools/Source/Python/UPT/Logger/ToolError.py
+++ b/BaseTools/Source/Python/UPT/Logger/ToolError.py
@@ -1,11 +1,11 @@
## @file
# Standardized Error Hanlding infrastructures.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
diff --git a/BaseTools/Source/Python/UPT/Logger/__init__.py b/BaseTools/Source/Python/UPT/Logger/__init__.py
index 2881ac7711..494c2d47e9 100644
--- a/BaseTools/Source/Python/UPT/Logger/__init__.py
+++ b/BaseTools/Source/Python/UPT/Logger/__init__.py
@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
Logger
-''' \ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/UPT/MkPkg.py b/BaseTools/Source/Python/UPT/MkPkg.py
index e7ec328a78..37942b8ed9 100644
--- a/BaseTools/Source/Python/UPT/MkPkg.py
+++ b/BaseTools/Source/Python/UPT/MkPkg.py
@@ -1,11 +1,11 @@
## @file
# Install distribution package.
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -55,7 +55,7 @@ from Common.MultipleWorkspace import MultipleWorkspace as mws
## CheckForExistingDp
#
# Check if there is a same name DP file existing
-# @param Path: The path to be checked
+# @param Path: The path to be checked
#
def CheckForExistingDp(Path):
if os.path.exists(Path):
@@ -76,7 +76,7 @@ def Main(Options = None):
if Options is None:
Logger.Error("\nMkPkg", OPTION_UNKNOWN_ERROR, ST.ERR_OPTION_NOT_FOUND)
try:
- DataBase = GlobalData.gDB
+ DataBase = GlobalData.gDB
ContentFileClosed = True
WorkspaceDir = GlobalData.gWORKSPACE
@@ -85,7 +85,7 @@ def Main(Options = None):
#
if not Options.PackFileToCreate:
Logger.Error("\nMkPkg", OPTION_UNKNOWN_ERROR, ST.ERR_OPTION_NOT_FOUND)
-
+
#
# Handle if the distribution package file already exists
#
@@ -95,7 +95,7 @@ def Main(Options = None):
# Check package file existing and valid
#
CheckFileList('.DEC', Options.PackageFileList, ST.ERR_INVALID_PACKAGE_NAME, ST.ERR_INVALID_PACKAGE_PATH)
- #
+ #
# Check module file existing and valid
#
CheckFileList('.INF', Options.ModuleFileList, ST.ERR_INVALID_MODULE_NAME, ST.ERR_INVALID_MODULE_PATH)
@@ -104,10 +104,10 @@ def Main(Options = None):
# Get list of files that installed with RePackage attribute available
#
RePkgDict = DataBase.GetRePkgDict()
-
- ContentFile = PackageFile(GlobalData.gCONTENT_FILE, "w")
+
+ ContentFile = PackageFile(GlobalData.gCONTENT_FILE, "w")
ContentFileClosed = False
-
+
#
# Add temp distribution header
#
@@ -118,7 +118,7 @@ def Main(Options = None):
#
# add distribution level tool/misc files
- # before pack, current dir should be workspace dir, else the full
+ # before pack, current dir should be workspace dir, else the full
# path will be in the pack file
#
Cwd = getcwd()
@@ -132,8 +132,8 @@ def Main(Options = None):
FileList += MiscObject.GetFileList()
for FileObject in FileList:
#
- # If you have unicode file names, please convert them to byte
- # strings in your desired encoding before passing them to
+ # If you have unicode file names, please convert them to byte
+ # strings in your desired encoding before passing them to
# write().
#
FromFile = os.path.normpath(FileObject.GetURI()).encode('utf_8')
@@ -151,8 +151,8 @@ def Main(Options = None):
DistPkg.Header.RePackage = True
ContentFile.PackFile(FromFile)
chdir(Cwd)
-
- #
+
+ #
# Add init dp information
#
else:
@@ -160,14 +160,14 @@ def Main(Options = None):
DistPkg.Header.Name = 'Distribution Package'
DistPkg.Header.Guid = str(uuid4())
DistPkg.Header.Version = '1.0'
-
+
DistPkg.GetDistributionPackage(WorkspaceDir, Options.PackageFileList, \
Options.ModuleFileList)
FileList, MetaDataFileList = DistPkg.GetDistributionFileList()
for File in FileList + MetaDataFileList:
FileFullPath = os.path.normpath(os.path.join(WorkspaceDir, File))
#
- # check whether file was included in a distribution that can not
+ # check whether file was included in a distribution that can not
# be repackaged
#
if FileFullPath in RePkgDict:
@@ -182,17 +182,17 @@ def Main(Options = None):
)
else:
DistPkg.Header.RePackage = True
-
+
Cwd = getcwd()
chdir(WorkspaceDir)
ContentFile.PackFiles(FileList)
chdir(Cwd)
-
- Logger.Verbose(ST.MSG_COMPRESS_DISTRIBUTION_PKG)
-
+
+ Logger.Verbose(ST.MSG_COMPRESS_DISTRIBUTION_PKG)
+
ContentFile.Close()
ContentFileClosed = True
-
+
#
# Add Md5Sigature
#
@@ -201,7 +201,7 @@ def Main(Options = None):
# Add current Date
#
DistPkg.Header.Date = str(strftime("%Y-%m-%dT%H:%M:%S", localtime()))
-
+
#
# Finish final dp file
#
@@ -214,7 +214,7 @@ def Main(Options = None):
ReturnCode = 0
except FatalError as XExcept:
- ReturnCode = XExcept.args[0]
+ ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % \
(python_version(), platform) + format_exc())
@@ -247,7 +247,7 @@ def Main(Options = None):
## CheckFileList
-#
+#
# @param QualifiedExt: QualifiedExt
# @param FileList: FileList
# @param ErrorStringExt: ErrorStringExt
@@ -263,7 +263,7 @@ def CheckFileList(QualifiedExt, FileList, ErrorStringExt, ErrorStringFullPath):
if Ext.upper() != QualifiedExt.upper():
Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, \
ErrorStringExt % Item)
-
+
Item = os.path.normpath(Item)
Path = mws.join(WorkspaceDir, Item)
if not os.path.exists(Path):
@@ -274,7 +274,7 @@ def CheckFileList(QualifiedExt, FileList, ErrorStringExt, ErrorStringFullPath):
elif not IsValidPath(Item, WorkspaceDir):
Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, \
ErrorStringExt % Item)
-
+
if not os.path.split(Item)[0]:
Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, \
ST.ERR_INVALID_METAFILE_PATH % Item)
diff --git a/BaseTools/Source/Python/UPT/Object/POM/CommonObject.py b/BaseTools/Source/Python/UPT/Object/POM/CommonObject.py
index ec8a8d44ce..a4fc5876e6 100644
--- a/BaseTools/Source/Python/UPT/Object/POM/CommonObject.py
+++ b/BaseTools/Source/Python/UPT/Object/POM/CommonObject.py
@@ -1,11 +1,11 @@
## @file
# This file is used to define common items of class object
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -23,10 +23,10 @@ from Library.DataType import TAB_LANGUAGE_EN_US
class HelpTextObject(object):
def __init__(self):
self.HelpText = TextObject()
-
+
def SetHelpText(self, HelpText):
self.HelpText = HelpText
-
+
def GetHelpText(self):
return self.HelpText
@@ -37,10 +37,10 @@ class HelpTextObject(object):
class HelpTextListObject(object):
def __init__(self):
self.HelpTextList = []
-
+
def SetHelpTextList(self, HelpTextList):
self.HelpTextList = HelpTextList
-
+
def GetHelpTextList(self):
return self.HelpTextList
@@ -51,13 +51,13 @@ class HelpTextListObject(object):
class PromptListObject(object):
def __init__(self):
self.PromptList = []
-
+
def SetPromptList(self, PromptList):
self.PromptList = PromptList
-
+
def GetPromptList(self):
return self.PromptList
-
+
## CommonPropertiesObject
#
# This class defined common attribution used in Module/Platform/Package files
@@ -77,7 +77,7 @@ class CommonPropertiesObject(HelpTextObject, HelpTextListObject):
self.GuidValue = ''
HelpTextObject.__init__(self)
HelpTextListObject.__init__(self)
-
+
def SetUsage(self, Usage):
self.Usage = Usage
@@ -95,10 +95,10 @@ class CommonPropertiesObject(HelpTextObject, HelpTextListObject):
def GetSupArchList(self):
return self.SupArchList
-
+
def SetGuidValue(self, GuidValue):
self.GuidValue = GuidValue
-
+
def GetGuidValue(self):
return self.GuidValue
@@ -199,11 +199,11 @@ class BinaryHeaderObject(object):
def GetBinaryHeaderLicense(self):
return self.BinaryHeaderLicenseList
-
+
## ClonedRecordObject
#
# This class defined ClonedRecord items used in Module/Platform/Package files
-#
+#
# @param object: Inherited from object class
#
class ClonedRecordObject(object):
@@ -285,7 +285,7 @@ class FileNameObject(CommonPropertiesObject):
self.FileType = ''
self.Filename = ''
CommonPropertiesObject.__init__(self)
-
+
def SetFileType(self, FileType):
self.FileType = FileType
@@ -306,10 +306,10 @@ class FileNameObject(CommonPropertiesObject):
#
class FileObject(object):
def __init__(self):
- self.Executable = ''
+ self.Executable = ''
self.Uri = ''
self.OsType = ''
-
+
def SetExecutable(self, Executable):
self.Executable = Executable
@@ -327,8 +327,8 @@ class FileObject(object):
def GetOS(self):
return self.OsType
-
-##
+
+##
# MiscFileObject is used for xml
#
# @param CommonHeaderObject: Inherited from CommonHeaderObject class
@@ -336,18 +336,18 @@ class FileObject(object):
class MiscFileObject(CommonHeaderObject):
def __init__(self):
self.Name = ''
- self.FileList = []
+ self.FileList = []
CommonHeaderObject.__init__(self)
-
+
def SetName(self, Name):
self.Name = Name
-
+
def GetName(self):
return self.Name
-
+
def SetFileList(self, FileList):
self.FileList = FileList
-
+
def GetFileList(self):
return self.FileList
@@ -397,7 +397,7 @@ class IdentificationObject(GuidVersionObject):
self.ModulePath = ''
self.CombinePath = ''
GuidVersionObject.__init__(self)
-
+
def SetName(self, Name):
self.Name = Name
@@ -448,7 +448,7 @@ class IdentificationObject(GuidVersionObject):
## GuidProtocolPpiCommonObject
#
-# This class defined Guid, Protocol and Ppi like items used in
+# This class defined Guid, Protocol and Ppi like items used in
# Module/Platform/Package files
#
# @param CommonPropertiesObject: Inherited from CommonPropertiesObject class
@@ -460,7 +460,7 @@ class GuidProtocolPpiCommonObject(CommonPropertiesObject):
self.Guid = ''
self.SupModuleList = []
CommonPropertiesObject.__init__(self)
-
+
def SetName(self, Name):
self.Name = Name
@@ -512,7 +512,7 @@ class GuidObject(GuidProtocolPpiCommonObject):
#
# This class defined Protocol item used in Module/Platform/Package files
#
-# @param GuidProtocolPpiCommonObject: Inherited from
+# @param GuidProtocolPpiCommonObject: Inherited from
# GuidProtocolPpiCommonObject
#
class ProtocolObject(GuidProtocolPpiCommonObject):
@@ -529,7 +529,7 @@ class ProtocolObject(GuidProtocolPpiCommonObject):
#
# This class defined Ppi item used in Module/Platform/Package files
#
-# @param GuidProtocolPpiCommonObject: Inherited from
+# @param GuidProtocolPpiCommonObject: Inherited from
# GuidProtocolPpiCommonObject
#
class PpiObject(GuidProtocolPpiCommonObject):
@@ -566,13 +566,13 @@ class UserExtensionObject(object):
self.BinaryLicenseList = []
self.UniLangDefsList = []
#
- # { Statement : Arch , ... }
+ # { Statement : Arch , ... }
#
- self.DefinesDict = {}
+ self.DefinesDict = {}
#
# { Arch : Statement , ... }
#
- self.BuildOptionDict = {}
+ self.BuildOptionDict = {}
self.IncludesDict = {}
self.SourcesDict = {}
self.BinariesDict = {}
@@ -581,40 +581,40 @@ class UserExtensionObject(object):
#
self.Statement = ''
self.SupArchList = []
-
+
def SetStatement(self, Statement):
self.Statement = Statement
-
+
def GetStatement(self):
return self.Statement
def SetSupArchList(self, ArchList):
self.SupArchList = ArchList
-
+
def GetSupArchList(self):
return self.SupArchList
-
+
def SetUserID(self, UserID):
self.UserID = UserID
-
+
def GetUserID(self):
return self.UserID
-
+
def SetIdentifier(self, Identifier):
self.Identifier = Identifier
-
+
def GetIdentifier(self):
return self.Identifier
-
+
def SetUniLangDefsList(self, UniLangDefsList):
self.UniLangDefsList = UniLangDefsList
-
+
def GetUniLangDefsList(self):
return self.UniLangDefsList
-
+
def SetBinaryAbstract(self, BinaryAbstractList):
self.BinaryAbstractList = BinaryAbstractList
-
+
def GetBinaryAbstract(self, Lang=None):
if Lang:
for (Key, Value) in self.BinaryAbstractList:
@@ -623,10 +623,10 @@ class UserExtensionObject(object):
return None
else:
return self.BinaryAbstractList
-
+
def SetBinaryDescription(self, BinaryDescriptionList):
self.BinaryDescriptionList = BinaryDescriptionList
-
+
def GetBinaryDescription(self, Lang=None):
if Lang:
for (Key, Value) in self.BinaryDescriptionList:
@@ -635,10 +635,10 @@ class UserExtensionObject(object):
return None
else:
return self.BinaryDescriptionList
-
+
def SetBinaryCopyright(self, BinaryCopyrightList):
self.BinaryCopyrightList = BinaryCopyrightList
-
+
def GetBinaryCopyright(self, Lang=None):
if Lang:
for (Key, Value) in self.BinaryCopyrightList:
@@ -647,10 +647,10 @@ class UserExtensionObject(object):
return None
else:
return self.BinaryCopyrightList
-
+
def SetBinaryLicense(self, BinaryLicenseList):
self.BinaryLicenseList = BinaryLicenseList
-
+
def GetBinaryLicense(self, Lang=None):
if Lang:
for (Key, Value) in self.BinaryLicenseList:
@@ -659,34 +659,34 @@ class UserExtensionObject(object):
return None
else:
return self.BinaryLicenseList
-
+
def SetDefinesDict(self, DefinesDict):
self.DefinesDict = DefinesDict
-
+
def GetDefinesDict(self):
return self.DefinesDict
-
+
def SetBuildOptionDict(self, BuildOptionDict):
self.BuildOptionDict = BuildOptionDict
-
+
def GetBuildOptionDict(self):
return self.BuildOptionDict
def SetIncludesDict(self, IncludesDict):
self.IncludesDict = IncludesDict
-
+
def GetIncludesDict(self):
return self.IncludesDict
def SetSourcesDict(self, SourcesDict):
self.SourcesDict = SourcesDict
-
+
def GetSourcesDict(self):
return self.SourcesDict
def SetBinariesDict(self, BinariesDict):
self.BinariesDict = BinariesDict
-
+
def GetBinariesDict(self):
return self.BinariesDict
@@ -703,28 +703,28 @@ class LibraryClassObject(CommonPropertiesObject):
self.SupModuleList = []
self.RecommendedInstance = GuidVersionObject()
CommonPropertiesObject.__init__(self)
-
+
def SetLibraryClass(self, LibraryClass):
self.LibraryClass = LibraryClass
-
+
def GetLibraryClass(self):
return self.LibraryClass
-
+
def SetSupModuleList(self, SupModuleList):
self.SupModuleList = SupModuleList
-
+
def GetSupModuleList(self):
return self.SupModuleList
-
+
def SetIncludeHeader(self, IncludeHeader):
self.IncludeHeader = IncludeHeader
-
+
def GetIncludeHeader(self):
return self.IncludeHeader
-
+
def SetRecommendedInstance(self, RecommendedInstance):
self.RecommendedInstance = RecommendedInstance
-
+
def GetRecommendedInstance(self):
return self.RecommendedInstance
@@ -748,69 +748,69 @@ class PcdErrorObject(object):
def SetValidValue(self, ValidValue):
self.ValidValue = ValidValue
-
+
def GetValidValue(self):
return self.ValidValue
-
+
def SetValidValueLang(self, ValidValueLang):
self.ValidValueLang = ValidValueLang
-
+
def GetValidValueLang(self):
return self.ValidValueLang
-
+
def SetValidValueRange(self, ValidValueRange):
self.ValidValueRange = ValidValueRange
-
+
def GetValidValueRange(self):
return self.ValidValueRange
-
+
def SetExpression(self, Expression):
self.Expression = Expression
-
+
def GetExpression(self):
return self.Expression
-
+
def SetErrorNumber(self, ErrorNumber):
self.ErrorNumber = ErrorNumber
-
+
def GetErrorNumber(self):
return self.ErrorNumber
-
+
def SetErrorMessageList(self, ErrorMessageList):
self.ErrorMessageList = ErrorMessageList
-
+
def GetErrorMessageList(self):
return self.ErrorMessageList
-
+
def SetTokenSpaceGuidCName(self, TokenSpaceGuidCName):
self.TokenSpaceGuidCName = TokenSpaceGuidCName
-
+
def GetTokenSpaceGuidCName(self):
return self.TokenSpaceGuidCName
-
+
def SetCName(self, CName):
self.CName = CName
-
+
def GetCName(self):
return self.CName
-
+
def SetFileLine(self, FileLine):
self.FileLine = FileLine
-
+
def GetFileLine(self):
return self.FileLine
-
+
def SetLineNum(self, LineNum):
self.LineNum = LineNum
-
+
def GetLineNum(self):
return self.LineNum
-
-
+
+
## IncludeObject
#
# This class defined Include item used in Module/Platform/Package files
-#
+#
# @param CommonPropertiesObject: Inherited from CommonPropertiesObject class
#
class IncludeObject(CommonPropertiesObject):
@@ -820,31 +820,31 @@ class IncludeObject(CommonPropertiesObject):
self.SupModuleList = []
self.Comment = ''
CommonPropertiesObject.__init__(self)
-
+
def SetFilePath(self, FilePath):
self.FilePath = FilePath
-
+
def GetFilePath(self):
return self.FilePath
-
+
def SetModuleType(self, ModuleType):
self.ModuleType = ModuleType
-
+
def GetModuleType(self):
return self.ModuleType
-
+
def SetSupModuleList(self, SupModuleList):
self.SupModuleList = SupModuleList
-
+
def GetSupModuleList(self):
- return self.SupModuleList
-
+ return self.SupModuleList
+
def SetComment(self, Comment):
self.Comment = Comment
-
+
def GetComment(self):
- return self.Comment
-
+ return self.Comment
+
## PcdObject
#
# This class defined Pcd item used in Module/Platform/Package files
@@ -874,86 +874,86 @@ class PcdObject(CommonPropertiesObject, HelpTextListObject, PromptListObject):
self.Offset = ''
self.ValidUsage = ''
self.ItemType = ''
- self.PcdErrorsList = []
- self.SupModuleList = []
+ self.PcdErrorsList = []
+ self.SupModuleList = []
CommonPropertiesObject.__init__(self)
HelpTextListObject.__init__(self)
PromptListObject.__init__(self)
-
+
def SetPcdCName(self, PcdCName):
self.PcdCName = PcdCName
-
+
def GetPcdCName(self):
return self.PcdCName
-
+
def SetCName(self, CName):
self.CName = CName
-
+
def GetCName(self):
return self.CName
-
+
def SetToken(self, Token):
self.Token = Token
-
+
def GetOffset(self):
return self.Offset
def SetOffset(self, Offset):
self.Offset = Offset
-
+
def GetToken(self):
return self.Token
-
+
def SetTokenSpaceGuidCName(self, TokenSpaceGuidCName):
self.TokenSpaceGuidCName = TokenSpaceGuidCName
-
+
def GetTokenSpaceGuidCName(self):
return self.TokenSpaceGuidCName
-
+
def SetTokenSpaceGuidValue(self, TokenSpaceGuidValue):
self.TokenSpaceGuidValue = TokenSpaceGuidValue
-
+
def GetTokenSpaceGuidValue(self):
return self.TokenSpaceGuidValue
-
+
def SetDatumType(self, DatumType):
self.DatumType = DatumType
-
+
def GetDatumType(self):
return self.DatumType
-
+
def SetMaxDatumSize(self, MaxDatumSize):
self.MaxDatumSize = MaxDatumSize
-
+
def GetMaxDatumSize(self):
return self.MaxDatumSize
def SetDefaultValue(self, DefaultValue):
self.DefaultValue = DefaultValue
-
+
def GetDefaultValue(self):
return self.DefaultValue
def SetValidUsage(self, ValidUsage):
self.ValidUsage = ValidUsage
-
+
def GetValidUsage(self):
return self.ValidUsage
-
+
def SetPcdErrorsList(self, PcdErrorsList):
self.PcdErrorsList = PcdErrorsList
-
+
def GetPcdErrorsList(self):
return self.PcdErrorsList
def SetItemType(self, ItemType):
self.ItemType = ItemType
-
+
def GetItemType(self):
return self.ItemType
def SetSupModuleList(self, SupModuleList):
self.SupModuleList = SupModuleList
-
+
def GetSupModuleList(self):
return self.SupModuleList
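The Get*Abstract/Description/Copyright/License helpers in UserExtensionObject take an optional Lang argument and fall back to returning the whole list when no language is given. The exact membership test sits outside the hunks shown, so the sketch below is only an assumption that each entry pairs a list of language codes with its text:

# Hypothetical stand-in for the language-keyed lookups in UserExtensionObject;
# each entry is (list_of_language_codes, text). Names here are invented.
def get_localized(entries, lang=None):
    if lang is None:
        return entries
    for langs, text in entries:
        if lang in langs:
            return text
    return None

abstracts = [(['en-US'], 'Sample binary abstract'),
             (['zh-CN'], 'Sample abstract, zh-CN')]
print(get_localized(abstracts, 'en-US'))   # Sample binary abstract
print(get_localized(abstracts))            # whole list when no language given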
diff --git a/BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py b/BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py
index 4052d28835..6bccd2ba4f 100644
--- a/BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py
+++ b/BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py
@@ -1,11 +1,11 @@
## @file
# This file is used to define a class object to describe a module
#
-# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,16 +17,16 @@ ModuleObject
##
# Import Modules
-#
+#
from Object.POM.CommonObject import CommonPropertiesObject
from Object.POM.CommonObject import IdentificationObject
from Object.POM.CommonObject import CommonHeaderObject
from Object.POM.CommonObject import BinaryHeaderObject
from Object.POM.CommonObject import HelpTextListObject
from Object.POM.CommonObject import GuidVersionObject
-
-##
+
+##
# BootModeObject
#
class BootModeObject(CommonPropertiesObject, HelpTextListObject):
@@ -34,10 +34,10 @@ class BootModeObject(CommonPropertiesObject, HelpTextListObject):
self.SupportedBootModes = ''
CommonPropertiesObject.__init__(self)
HelpTextListObject.__init__(self)
-
+
def SetSupportedBootModes(self, SupportedBootModes):
self.SupportedBootModes = SupportedBootModes
-
+
def GetSupportedBootModes(self):
return self.SupportedBootModes
@@ -49,10 +49,10 @@ class EventObject(CommonPropertiesObject, HelpTextListObject):
self.EventType = ''
CommonPropertiesObject.__init__(self)
HelpTextListObject.__init__(self)
-
+
def SetEventType(self, EventType):
self.EventType = EventType
-
+
def GetEventType(self):
return self.EventType
@@ -64,10 +64,10 @@ class HobObject(CommonPropertiesObject, HelpTextListObject):
self.HobType = ''
CommonPropertiesObject.__init__(self)
HelpTextListObject.__init__(self)
-
+
def SetHobType(self, HobType):
self.HobType = HobType
-
+
def GetHobType(self):
return self.HobType
@@ -78,23 +78,23 @@ class SpecObject(object):
def __init__(self):
self.Spec = ''
self.Version = ''
-
+
def SetSpec(self, Spec):
self.Spec = Spec
-
+
def GetSpec(self):
return self.Spec
-
+
def SetVersion(self, Version):
self.Version = Version
-
+
def GetVersion(self):
return self.Version
## ModuleHeaderObject
#
# This class defined header items used in Module file
-#
+#
class ModuleHeaderObject(IdentificationObject, CommonHeaderObject, BinaryHeaderObject):
def __init__(self):
self.IsLibrary = False
@@ -109,97 +109,97 @@ class ModuleHeaderObject(IdentificationObject, CommonHeaderObject, BinaryHeaderO
#
# SpecObject
#
- self.SpecList = []
+ self.SpecList = []
#
# BootModeObject
#
- self.BootModeList = []
+ self.BootModeList = []
#
# EventObject
#
- self.EventList = []
+ self.EventList = []
#
# HobObject
#
self.HobList = []
- #
+ #
# LibraryClassObject
#
- self.LibraryClassList = []
+ self.LibraryClassList = []
self.SupArchList = []
IdentificationObject.__init__(self)
CommonHeaderObject.__init__(self)
BinaryHeaderObject.__init__(self)
-
+
def SetIsLibrary(self, IsLibrary):
self.IsLibrary = IsLibrary
-
+
def GetIsLibrary(self):
return self.IsLibrary
-
+
def SetIsLibraryModList(self, IsLibraryModList):
self.IsLibraryModList = IsLibraryModList
-
+
def GetIsLibraryModList(self):
return self.IsLibraryModList
-
+
def SetModuleType(self, ModuleType):
self.ModuleType = ModuleType
-
+
def GetModuleType(self):
return self.ModuleType
-
+
def SetBinaryModule(self, BinaryModule):
self.BinaryModule = BinaryModule
-
+
def GetBinaryModule(self):
return self.BinaryModule
-
+
def SetPcdIsDriver(self, PcdIsDriver):
self.PcdIsDriver = PcdIsDriver
-
+
def GetPcdIsDriver(self):
return self.PcdIsDriver
-
+
def SetPiSpecificationVersion(self, PiSpecificationVersion):
self.PiSpecificationVersion = PiSpecificationVersion
-
+
def GetPiSpecificationVersion(self):
return self.PiSpecificationVersion
-
+
def SetUefiSpecificationVersion(self, UefiSpecificationVersion):
self.UefiSpecificationVersion = UefiSpecificationVersion
-
+
def GetUefiSpecificationVersion(self):
return self.UefiSpecificationVersion
-
+
def SetSpecList(self, SpecList):
self.SpecList = SpecList
-
+
def GetSpecList(self):
return self.SpecList
-
+
def SetBootModeList(self, BootModeList):
self.BootModeList = BootModeList
-
+
def GetBootModeList(self):
return self.BootModeList
-
+
def SetEventList(self, EventList):
self.EventList = EventList
-
+
def GetEventList(self):
return self.EventList
-
+
def SetHobList(self, HobList):
self.HobList = HobList
-
+
def GetHobList(self):
return self.HobList
def SetLibraryClassList(self, LibraryClassList):
self.LibraryClassList = LibraryClassList
-
+
def GetLibraryClassList(self):
return self.LibraryClassList
@@ -225,38 +225,38 @@ class SourceFileObject(CommonPropertiesObject):
self.ToolCode = ''
self.Family = ''
self.FileType = ''
-
+
def SetSourceFile(self, SourceFile):
self.SourceFile = SourceFile
-
+
def GetSourceFile(self):
return self.SourceFile
-
+
def SetTagName(self, TagName):
self.TagName = TagName
-
+
def GetTagName(self):
return self.TagName
-
+
def SetToolCode(self, ToolCode):
self.ToolCode = ToolCode
-
+
def GetToolCode(self):
return self.ToolCode
-
+
def SetFamily(self, Family):
self.Family = Family
-
+
def GetFamily(self):
return self.Family
-
+
def SetFileType(self, FileType):
self.FileType = FileType
-
+
def GetFileType(self):
return self.FileType
-
+
##
# BinaryFileObject
#
@@ -265,7 +265,7 @@ class BinaryFileObject(CommonPropertiesObject):
self.FileNamList = []
self.AsBuiltList = []
CommonPropertiesObject.__init__(self)
-
+
def SetFileNameList(self, FileNamList):
self.FileNamList = FileNamList
@@ -277,7 +277,7 @@ class BinaryFileObject(CommonPropertiesObject):
def GetAsBuiltList(self):
return self.AsBuiltList
-
+
##
# AsBuildLibraryClassObject
@@ -287,12 +287,12 @@ class AsBuildLibraryClassObject(object):
self.LibGuid = ''
self.LibVersion = ''
self.SupArchList = []
-
+
def SetLibGuid(self, LibGuid):
self.LibGuid = LibGuid
def GetLibGuid(self):
return self.LibGuid
-
+
def SetLibVersion(self, LibVersion):
self.LibVersion = LibVersion
def GetLibVersion(self):
@@ -342,7 +342,7 @@ class AsBuiltObject(object):
def GetLibraryInstancesList(self):
return self.LibraryInstancesList
-
+
def SetBuildFlagsList(self, BinaryBuildFlagList):
self.BinaryBuildFlagList = BinaryBuildFlagList
@@ -351,7 +351,7 @@ class AsBuiltObject(object):
##
# BinaryBuildFlag, this object will include those fields that are not
-# covered by the UPT Spec BinaryFile field
+# covered by the UPT Spec BinaryFile field
#
class BinaryBuildFlagObject(object):
def __init__(self):
@@ -359,31 +359,31 @@ class BinaryBuildFlagObject(object):
self.TagName = ''
self.Family = ''
self.AsBuiltOptionFlags = ''
-
+
def SetTarget(self, Target):
self.Target = Target
def GetTarget(self):
- return self.Target
+ return self.Target
def SetTagName(self, TagName):
self.TagName = TagName
-
+
def GetTagName(self):
return self.TagName
-
+
def SetFamily(self, Family):
self.Family = Family
-
+
def GetFamily(self):
return self.Family
-
+
def SetAsBuiltOptionFlags(self, AsBuiltOptionFlags):
self.AsBuiltOptionFlags = AsBuiltOptionFlags
def GetAsBuiltOptionFlags(self):
return self.AsBuiltOptionFlags
-
-##
+
+##
# ExternObject
#
class ExternObject(CommonPropertiesObject):
@@ -394,31 +394,31 @@ class ExternObject(CommonPropertiesObject):
self.Destructor = ''
self.SupModList = []
CommonPropertiesObject.__init__(self)
-
+
def SetEntryPoint(self, EntryPoint):
self.EntryPoint = EntryPoint
-
+
def GetEntryPoint(self):
return self.EntryPoint
-
+
def SetUnloadImage(self, UnloadImage):
self.UnloadImage = UnloadImage
-
+
def GetUnloadImage(self):
return self.UnloadImage
-
+
def SetConstructor(self, Constructor):
self.Constructor = Constructor
-
+
def GetConstructor(self):
return self.Constructor
-
+
def SetDestructor(self, Destructor):
self.Destructor = Destructor
-
+
def GetDestructor(self):
return self.Destructor
-
+
def SetSupModList(self, SupModList):
self.SupModList = SupModList
def GetSupModList(self):
@@ -432,16 +432,16 @@ class DepexObject(CommonPropertiesObject):
self.Depex = ''
self.ModuelType = ''
CommonPropertiesObject.__init__(self)
-
+
def SetDepex(self, Depex):
self.Depex = Depex
-
+
def GetDepex(self):
return self.Depex
-
+
def SetModuleType(self, ModuleType):
self.ModuelType = ModuleType
-
+
def GetModuleType(self):
return self.ModuelType
@@ -454,16 +454,16 @@ class PackageDependencyObject(GuidVersionObject, CommonPropertiesObject):
self.PackageFilePath = ''
GuidVersionObject.__init__(self)
CommonPropertiesObject.__init__(self)
-
+
def SetPackageFilePath(self, PackageFilePath):
self.PackageFilePath = PackageFilePath
-
+
def GetPackageFilePath(self):
return self.PackageFilePath
def SetPackage(self, Package):
self.Package = Package
-
+
def GetPackage(self):
return self.Package
@@ -474,13 +474,13 @@ class BuildOptionObject(CommonPropertiesObject):
def __init__(self):
CommonPropertiesObject.__init__(self)
self.BuildOption = ''
-
+
def SetBuildOption(self, BuildOption):
self.BuildOption = BuildOption
-
+
def GetBuildOption(self):
return self.BuildOption
-
+
##
# ModuleObject
#
@@ -489,172 +489,172 @@ class ModuleObject(ModuleHeaderObject):
#
# {Arch : ModuleHeaderObject}
#
- self.HeaderDict = {}
+ self.HeaderDict = {}
#
# LibraryClassObject
#
- self.LibraryClassList = []
+ self.LibraryClassList = []
#
# SourceFileObject
#
- self.SourceFileList = []
+ self.SourceFileList = []
#
# BinaryFileObject
#
- self.BinaryFileList = []
+ self.BinaryFileList = []
#
# PackageDependencyObject
#
- self.PackageDependencyList = []
+ self.PackageDependencyList = []
#
# DepexObject
#
- self.PeiDepex = []
+ self.PeiDepex = []
#
# DepexObject
#
- self.DxeDepex = []
+ self.DxeDepex = []
#
# DepexObject
#
- self.SmmDepex = []
+ self.SmmDepex = []
#
# ProtocolObject
#
- self.ProtocolList = []
+ self.ProtocolList = []
#
# PpiObject
#
- self.PpiList = []
+ self.PpiList = []
#
# GuidObject
#
- self.GuidList = []
+ self.GuidList = []
#
# PcdObject
#
- self.PcdList = []
+ self.PcdList = []
#
# ExternObject
#
- self.ExternList = []
+ self.ExternList = []
#
# BuildOptionObject
#
- self.BuildOptionList = []
+ self.BuildOptionList = []
#
# UserExtensionObject
#
- self.UserExtensionList = []
+ self.UserExtensionList = []
#
# MiscFileObject
#
- self.MiscFileList = []
+ self.MiscFileList = []
#
# ClonedFromObject
#
- self.ClonedFrom = None
-
+ self.ClonedFrom = None
+
ModuleHeaderObject.__init__(self)
-
+
def SetHeaderDict(self, HeaderDict):
self.HeaderDict = HeaderDict
-
+
def GetHeaderDict(self):
return self.HeaderDict
-
+
def SetLibraryClassList(self, LibraryClassList):
self.LibraryClassList = LibraryClassList
-
+
def GetLibraryClassList(self):
return self.LibraryClassList
-
+
def SetSourceFileList(self, SourceFileList):
self.SourceFileList = SourceFileList
-
+
def GetSourceFileList(self):
return self.SourceFileList
-
+
def SetBinaryFileList(self, BinaryFileList):
self.BinaryFileList = BinaryFileList
def GetBinaryFileList(self):
return self.BinaryFileList
-
+
def SetPackageDependencyList(self, PackageDependencyList):
self.PackageDependencyList = PackageDependencyList
-
+
def GetPackageDependencyList(self):
return self.PackageDependencyList
-
+
def SetPeiDepex(self, PeiDepex):
self.PeiDepex = PeiDepex
-
+
def GetPeiDepex(self):
return self.PeiDepex
-
+
def SetDxeDepex(self, DxeDepex):
self.DxeDepex = DxeDepex
-
+
def GetDxeDepex(self):
return self.DxeDepex
-
+
def SetSmmDepex(self, SmmDepex):
self.SmmDepex = SmmDepex
-
+
def GetSmmDepex(self):
return self.SmmDepex
-
+
def SetPpiList(self, PpiList):
self.PpiList = PpiList
-
+
def GetPpiList(self):
return self.PpiList
def SetProtocolList(self, ProtocolList):
self.ProtocolList = ProtocolList
-
+
def GetProtocolList(self):
return self.ProtocolList
def SetPcdList(self, PcdList):
self.PcdList = PcdList
-
+
def GetPcdList(self):
return self.PcdList
-
+
def SetGuidList(self, GuidList):
self.GuidList = GuidList
-
+
def GetGuidList(self):
return self.GuidList
-
+
def SetExternList(self, ExternList):
self.ExternList = ExternList
def GetExternList(self):
return self.ExternList
-
+
def SetBuildOptionList(self, BuildOptionList):
self.BuildOptionList = BuildOptionList
-
+
def GetBuildOptionList(self):
return self.BuildOptionList
-
+
def SetUserExtensionList(self, UserExtensionList):
self.UserExtensionList = UserExtensionList
-
+
def GetUserExtensionList(self):
return self.UserExtensionList
-
+
def SetMiscFileList(self, MiscFileList):
self.MiscFileList = MiscFileList
-
+
def GetMiscFileList(self):
return self.MiscFileList
-
+
def SetClonedFrom(self, ClonedFrom):
self.ClonedFrom = ClonedFrom
-
+
def GetClonedFrom(self):
return self.ClonedFrom
diff --git a/BaseTools/Source/Python/UPT/Object/POM/PackageObject.py b/BaseTools/Source/Python/UPT/Object/POM/PackageObject.py
index 57bdcf96a3..46de837a44 100644
--- a/BaseTools/Source/Python/UPT/Object/POM/PackageObject.py
+++ b/BaseTools/Source/Python/UPT/Object/POM/PackageObject.py
@@ -1,11 +1,11 @@
## @file
# This file is used to define a class object to describe a package
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -30,10 +30,10 @@ class StandardIncludeFileObject(CommonPropertiesObject):
def __init__(self):
CommonPropertiesObject.__init__(self)
self.IncludeFile = ''
-
+
def SetIncludeFile(self, IncludeFile):
self.IncludeFile = IncludeFile
-
+
def GetIncludeFile(self):
return self.IncludeFile
@@ -53,19 +53,19 @@ class PackageObject(IdentificationObject, CommonHeaderObject, BinaryHeaderObject
#
# LibraryClassObject
#
- self.LibraryClassList = []
+ self.LibraryClassList = []
#
# FileObject
#
- self.IncludePathList = []
+ self.IncludePathList = []
#
# StandardIncludeFileObject
#
- self.StandardIncludeFileList = []
+ self.StandardIncludeFileList = []
#
# PackageIncludeFileObject
#
- self.PackageIncludeFileList = []
+ self.PackageIncludeFileList = []
#
# Include and Arch List, item is (IncludePath, SupArchList-List of Arch), used during install package
#
@@ -73,126 +73,126 @@ class PackageObject(IdentificationObject, CommonHeaderObject, BinaryHeaderObject
#
# ProtocolObject
#
- self.ProtocolList = []
+ self.ProtocolList = []
#
# PpiObject
#
- self.PpiList = []
+ self.PpiList = []
#
# GuidObject
#
- self.GuidList = []
+ self.GuidList = []
#
# (PcdObject, PcdErrorObject)
#
- self.PcdList = []
+ self.PcdList = []
#
# {(PcdTokenSpaceGuidCName, PcdErrroNumber): PcdErrorMessageList}
#
self.PcdErrorCommentDict = {}
#
# UserExtensionObject
- #
- self.UserExtensionList = []
+ #
+ self.UserExtensionList = []
#
# MiscFileObject
#
- self.MiscFileList = []
+ self.MiscFileList = []
self.ModuleDict = Sdict()
#
# ClonedRecordObject
#
- self.ClonedFromList = []
+ self.ClonedFromList = []
#
# string object
#
- self.ModuleFileList = []
-
+ self.ModuleFileList = []
+
self.PcdChecks = []
-
+
self.UNIFlag = False
-
+
def SetLibraryClassList(self, LibraryClassList):
self.LibraryClassList = LibraryClassList
-
+
def GetLibraryClassList(self):
return self.LibraryClassList
-
+
def SetIncludePathList(self, IncludePathList):
self.IncludePathList = IncludePathList
-
+
def GetIncludePathList(self):
return self.IncludePathList
def SetIncludeArchList(self, IncludeArchList):
self.IncludeArchList = IncludeArchList
-
+
def GetIncludeArchList(self):
- return self.IncludeArchList
-
+ return self.IncludeArchList
+
def SetStandardIncludeFileList(self, StandardIncludeFileList):
self.StandardIncludeFileList = StandardIncludeFileList
-
+
def GetStandardIncludeFileList(self):
return self.StandardIncludeFileList
-
+
def SetPackageIncludeFileList(self, PackageIncludeFileList):
self.PackageIncludeFileList = PackageIncludeFileList
-
+
def GetPackageIncludeFileList(self):
return self.PackageIncludeFileList
-
+
def SetProtocolList(self, ProtocolList):
self.ProtocolList = ProtocolList
-
+
def GetProtocolList(self):
return self.ProtocolList
-
+
def SetPpiList(self, PpiList):
self.PpiList = PpiList
-
+
def GetPpiList(self):
return self.PpiList
-
+
def SetGuidList(self, GuidList):
self.GuidList = GuidList
-
+
def GetGuidList(self):
return self.GuidList
-
+
def SetPcdList(self, PcdList):
self.PcdList = PcdList
-
+
def GetPcdList(self):
return self.PcdList
-
+
def SetUserExtensionList(self, UserExtensionList):
self.UserExtensionList = UserExtensionList
-
+
def GetUserExtensionList(self):
return self.UserExtensionList
-
+
def SetMiscFileList(self, MiscFileList):
self.MiscFileList = MiscFileList
-
+
def GetMiscFileList(self):
return self.MiscFileList
def SetModuleDict(self, ModuleDict):
self.ModuleDict = ModuleDict
-
+
def GetModuleDict(self):
return self.ModuleDict
def SetClonedFromList(self, ClonedFromList):
self.ClonedFromList = ClonedFromList
-
+
def GetClonedFromList(self):
return self.ClonedFromList
def SetModuleFileList(self, ModuleFileList):
self.ModuleFileList = ModuleFileList
-
+
def GetModuleFileList(self):
return self.ModuleFileList
diff --git a/BaseTools/Source/Python/UPT/Object/POM/__init__.py b/BaseTools/Source/Python/UPT/Object/POM/__init__.py
index e2235f1bf0..fdf3d8bd25 100644
--- a/BaseTools/Source/Python/UPT/Object/POM/__init__.py
+++ b/BaseTools/Source/Python/UPT/Object/POM/__init__.py
@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
POM
-''' \ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/DecObject.py b/BaseTools/Source/Python/UPT/Object/Parser/DecObject.py
index 6336a90fb9..302d3050aa 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/DecObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/DecObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects for DEC file. It will consumed by
+# This file is used to define class objects for DEC file. It will be consumed by
#DecParser
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -49,7 +49,7 @@ class _DecComments:
return self._HeadComment, self._TailComment
## GetHeadComment
- #
+ #
def GetHeadComment(self):
return self._HeadComment
@@ -61,7 +61,7 @@ class _DecComments:
self._HeadComment = Comment
## GetTailComment
- #
+ #
def GetTailComment(self):
return self._TailComment
@@ -89,22 +89,22 @@ class _DecBaseObject(_DecComments):
self._SecName = ''
## GetSectionName
- #
+ #
def GetSectionName(self):
return self._SecName
## GetPackagePath
- #
+ #
def GetPackagePath(self):
return self._PackagePath
## GetPackageFile
- #
+ #
def GetPackageFile(self):
return self._FileName
## GetPackageFullName
- #
+ #
def GetPackageFullName(self):
return self._PkgFullName
@@ -151,7 +151,7 @@ class _DecBaseObject(_DecComments):
## _DecItemBaseObject
#
-# Module type and arch the item belongs to
+# Module type and arch the item belongs to
#
class _DecItemBaseObject(_DecComments):
def __init__(self):
@@ -162,7 +162,7 @@ class _DecItemBaseObject(_DecComments):
self.ArchAndModuleType = []
## GetArchList
- #
+ #
def GetArchList(self):
ArchSet = set()
for Arch in self.ArchAndModuleType:
@@ -184,7 +184,7 @@ class DecDefineObject(_DecBaseObject):
self._PkgUniFile = ''
## GetPackageSpecification
- #
+ #
def GetPackageSpecification(self):
return self._DecSpec
@@ -192,7 +192,7 @@ class DecDefineObject(_DecBaseObject):
self._DecSpec = DecSpec
## GetPackageName
- #
+ #
def GetPackageName(self):
return self._PkgName
@@ -200,7 +200,7 @@ class DecDefineObject(_DecBaseObject):
self._PkgName = PkgName
## GetPackageGuid
- #
+ #
def GetPackageGuid(self):
return self._PkgGuid
@@ -208,7 +208,7 @@ class DecDefineObject(_DecBaseObject):
self._PkgGuid = PkgGuid
## GetPackageVersion
- #
+ #
def GetPackageVersion(self):
return self._PkgVersion
@@ -216,7 +216,7 @@ class DecDefineObject(_DecBaseObject):
self._PkgVersion = PkgVersion
## GetPackageUniFile
- #
+ #
def GetPackageUniFile(self):
return self._PkgUniFile
@@ -224,12 +224,12 @@ class DecDefineObject(_DecBaseObject):
self._PkgUniFile = PkgUniFile
## GetDefines
- #
+ #
def GetDefines(self):
return self._GetItemByArch(TAB_ARCH_COMMON)
## GetAllDefines
- #
+ #
def GetAllDefines(self):
return self._GetAllItems()
@@ -244,7 +244,7 @@ class DecDefineItemObject(_DecItemBaseObject):
self.Value = ''
## __hash__
- #
+ #
def __hash__(self):
return hash(self.Key + self.Value)
@@ -254,7 +254,7 @@ class DecDefineItemObject(_DecItemBaseObject):
return id(self) == id(Other)
## __str__
- #
+ #
def __str__(self):
return str(self.ArchAndModuleType) + '\n' + self.Key + \
' = ' + self.Value
@@ -269,12 +269,12 @@ class DecIncludeObject(_DecBaseObject):
self._SecName = TAB_INCLUDES.upper()
## GetIncludes
- #
+ #
def GetIncludes(self, Arch=TAB_ARCH_COMMON):
return self._GetItemByArch(Arch)
## GetAllIncludes
- #
+ #
def GetAllIncludes(self):
return self._GetAllItems()
@@ -289,7 +289,7 @@ class DecIncludeItemObject(_DecItemBaseObject):
_DecItemBaseObject.__init__(self)
## __hash__
- #
+ #
def __hash__(self):
return hash(self.File)
@@ -299,7 +299,7 @@ class DecIncludeItemObject(_DecItemBaseObject):
return id(self) == id(Other)
## __str__
- #
+ #
def __str__(self):
return self.File
@@ -314,12 +314,12 @@ class DecLibraryclassObject(_DecBaseObject):
self._SecName = TAB_LIBRARY_CLASSES.upper()
## GetLibraryclasses
- #
+ #
def GetLibraryclasses(self, Arch=TAB_ARCH_COMMON):
return self._GetItemByArch(Arch)
## GetAllLibraryclasses
- #
+ #
def GetAllLibraryclasses(self):
return self._GetAllItems()
@@ -334,7 +334,7 @@ class DecLibraryclassItemObject(_DecItemBaseObject):
self.Libraryclass = Libraryclass
## __hash__
- #
+ #
def __hash__(self):
return hash(self.Libraryclass + self.File)
@@ -344,7 +344,7 @@ class DecLibraryclassItemObject(_DecItemBaseObject):
return id(self) == id(Other)
## __str__
- #
+ #
def __str__(self):
return self.Libraryclass + '|' + self.File
@@ -381,7 +381,7 @@ class DecPcdObject(_DecBaseObject):
#
# @param PcdType: PcdType
# @param Arch: Arch
- #
+ #
def GetPcds(self, PcdType, Arch=TAB_ARCH_COMMON):
PcdType = PcdType.upper()
Arch = Arch.upper()
@@ -392,7 +392,7 @@ class DecPcdObject(_DecBaseObject):
## GetPcdsByType
#
# @param PcdType: PcdType
- #
+ #
def GetPcdsByType(self, PcdType):
PcdType = PcdType.upper()
Retlst = []
@@ -422,7 +422,7 @@ class DecPcdItemObject(_DecItemBaseObject):
self.MaxDatumSize = MaxDatumSize
## __hash__
- #
+ #
def __hash__(self):
return hash(self.TokenSpaceGuidCName + self.TokenCName)
@@ -434,7 +434,7 @@ class DecPcdItemObject(_DecItemBaseObject):
## GetArchListOfType
#
# @param PcdType: PcdType
- #
+ #
def GetArchListOfType(self, PcdType):
ItemSet = set()
PcdType = PcdType.upper()
@@ -458,12 +458,12 @@ class DecGuidObjectBase(_DecBaseObject):
## GetGuidStyleItems
#
# @param Arch: Arch
- #
+ #
def GetGuidStyleItems(self, Arch=TAB_ARCH_COMMON):
return self._GetItemByArch(Arch)
## GetGuidStyleAllItems
- #
+ #
def GetGuidStyleAllItems(self):
return self._GetAllItems()
@@ -481,7 +481,7 @@ class DecGuidItemObject(_DecItemBaseObject):
self.GuidString = GuidString
## __hash__
- #
+ #
def __hash__(self):
return hash(self.GuidCName)
@@ -491,7 +491,7 @@ class DecGuidItemObject(_DecItemBaseObject):
return id(self) == id(Other)
## __str__
- #
+ #
def __str__(self):
return self.GuidCName + ' = ' + self.GuidCValue
@@ -507,14 +507,14 @@ class DecGuidObject(DecGuidObjectBase):
self._SecName = TAB_GUIDS.upper()
## GetGuids
- #
+ #
# @param Arch: Arch
#
def GetGuids(self, Arch=TAB_ARCH_COMMON):
return self._GetItemByArch(Arch)
## GetAllGuids
- #
+ #
def GetAllGuids(self):
return self._GetAllItems()
@@ -530,14 +530,14 @@ class DecPpiObject(DecGuidObjectBase):
self._SecName = TAB_PPIS.upper()
## GetPpis
- #
+ #
# @param Arch: Arch
- #
+ #
def GetPpis(self, Arch=TAB_ARCH_COMMON):
return self._GetItemByArch(Arch)
## GetAllPpis
- #
+ #
def GetAllPpis(self):
return self._GetAllItems()
@@ -553,14 +553,14 @@ class DecProtocolObject(DecGuidObjectBase):
self._SecName = TAB_PROTOCOLS.upper()
## GetProtocols
- #
+ #
# @param Arch: Arch
- #
+ #
def GetProtocols(self, Arch=TAB_ARCH_COMMON):
return self._GetItemByArch(Arch)
## GetAllProtocols
- #
+ #
def GetAllProtocols(self):
return self._GetAllItems()
@@ -577,10 +577,10 @@ class DecUserExtensionObject(_DecBaseObject):
self.ItemList = []
## GetProtocols
- #
+ #
# @param Item: Item
# @param Scope: Scope
- #
+ #
def AddItem(self, Item, Scope):
if not Scope:
pass
@@ -589,7 +589,7 @@ class DecUserExtensionObject(_DecBaseObject):
self.ItemList.append(Item)
## GetAllUserExtensions
- #
+ #
def GetAllUserExtensions(self):
return self.ItemList
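GetArchListOfType in DecPcdItemObject collects the set of architectures under which a PCD appears with a given type; only the first lines of its body survive the hunk above, so the following standalone sketch is a guess at the idea rather than the real data layout (it assumes a flat list of (pcd_type, arch) pairs):

# Illustrative only: flattening (pcd_type, arch) pairs into an arch set,
# in the spirit of DecPcdItemObject.GetArchListOfType. Data is made up.
def arch_list_of_type(type_arch_pairs, pcd_type):
    arches = set()
    for item_type, arch in type_arch_pairs:
        if item_type.upper() == pcd_type.upper():
            arches.add(arch.upper())
    return sorted(arches)

pairs = [('PcdsFixedAtBuild', 'IA32'), ('PcdsFixedAtBuild', 'X64'),
         ('PcdsDynamic', 'COMMON')]
print(arch_list_of_type(pairs, 'PcdsFixedAtBuild'))   # ['IA32', 'X64']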
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py
index cc2fc49053..af5af46ab2 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file [Binaries] section.
-# It will consumed by InfParser.
+# This file is used to define class objects of INF file [Binaries] section.
+# It will be consumed by InfParser.
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -117,7 +117,7 @@ class InfBianryCommonItem(InfBianryItem, CurrentLine):
self.Family = Family
def GetFamily(self):
return self.Family
-
+
def SetGuidValue(self, GuidValue):
self.GuidValue = GuidValue
def GetGuidValue(self):
@@ -228,7 +228,7 @@ class InfBinariesObject(InfSectionCommonDef):
Line=VerCurrentLine.GetLineNo(),
ExtraData=VerCurrentLine.GetLineString())
#
- # Validate Feature Flag Express
+ # Validate Feature Flag Express
#
FeatureFlagRtv = IsValidFeatureFlagExp(VerContent[3].\
strip())
@@ -246,15 +246,15 @@ class InfBinariesObject(InfSectionCommonDef):
#
# Determine binary file name duplicate. Follow below rule:
#
- # A binary filename must not be duplicated within
- # a [Binaries] section. A binary filename may appear in
- # multiple architectural [Binaries] sections. A binary
- # filename listed in an architectural [Binaries] section
- # must not be listed in the common architectural
+ # A binary filename must not be duplicated within
+ # a [Binaries] section. A binary filename may appear in
+ # multiple architectural [Binaries] sections. A binary
+ # filename listed in an architectural [Binaries] section
+ # must not be listed in the common architectural
# [Binaries] section.
- #
+ #
# NOTE: This check will not report error now.
- #
+ #
for Item in self.Binaries:
if Item.GetFileName() == InfBianryVerItemObj.GetFileName():
ItemSupArchList = Item.GetSupArchList()
@@ -315,7 +315,7 @@ class InfBinariesObject(InfSectionCommonDef):
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
return False
-
+
if len(ItemContent) > 7:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
@@ -335,7 +335,7 @@ class InfBinariesObject(InfSectionCommonDef):
BinaryFileType = ItemContent[0].strip()
if BinaryFileType == 'RAW' or BinaryFileType == 'ACPI' or BinaryFileType == 'ASL':
BinaryFileType = 'BIN'
-
+
if BinaryFileType not in DT.BINARY_FILE_TYPE_LIST:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
@@ -344,10 +344,10 @@ class InfBinariesObject(InfSectionCommonDef):
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
-
+
if BinaryFileType == 'SUBTYPE_GUID':
BinaryFileType = 'FREEFORM'
-
+
if BinaryFileType == 'LIB' or BinaryFileType == 'UEFI_APP':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
@@ -407,7 +407,7 @@ class InfBinariesObject(InfSectionCommonDef):
#
if BinaryFileType != 'FREEFORM':
InfBianryCommonItemObj.SetTarget(ItemContent[2])
-
+
if len(ItemContent) >= 4:
#
# Add Family information
@@ -416,13 +416,13 @@ class InfBinariesObject(InfSectionCommonDef):
InfBianryCommonItemObj.SetFamily(ItemContent[3])
else:
InfBianryCommonItemObj.SetTarget(ItemContent[3])
-
+
if len(ItemContent) >= 5:
#
- # TagName entries are build system specific. If there
- # is content in the entry, the tool must exit
+ # TagName entries are build system specific. If there
+ # is content in the entry, the tool must exit
# gracefully with an error message that indicates build
- # system specific content cannot be distributed using
+ # system specific content cannot be distributed using
# the UDP
#
if BinaryFileType != 'FREEFORM':
@@ -435,7 +435,7 @@ class InfBinariesObject(InfSectionCommonDef):
ExtraData=CurrentLineOfItem.GetLineString())
else:
InfBianryCommonItemObj.SetFamily(ItemContent[4])
-
+
if len(ItemContent) >= 6:
#
# Add FeatureFlagExp
@@ -449,7 +449,7 @@ class InfBinariesObject(InfSectionCommonDef):
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
#
- # Validate Feature Flag Express
+ # Validate Feature Flag Express
#
FeatureFlagRtv = IsValidFeatureFlagExp(ItemContent[5].strip())
if not FeatureFlagRtv[0]:
@@ -468,7 +468,7 @@ class InfBinariesObject(InfSectionCommonDef):
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
-
+
if len(ItemContent) == 7:
if ItemContent[6].strip() == '':
Logger.Error("InfParser",
@@ -478,7 +478,7 @@ class InfBinariesObject(InfSectionCommonDef):
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
#
- # Validate Feature Flag Express
+ # Validate Feature Flag Express
#
FeatureFlagRtv = IsValidFeatureFlagExp(ItemContent[6].strip())
if not FeatureFlagRtv[0]:
@@ -495,15 +495,15 @@ class InfBinariesObject(InfSectionCommonDef):
#
# Determine binary file name duplicate. Follow below rule:
#
- # A binary filename must not be duplicated within
- # a [Binaries] section. A binary filename may appear in
- # multiple architectural [Binaries] sections. A binary
- # filename listed in an architectural [Binaries] section
- # must not be listed in the common architectural
+ # A binary filename must not be duplicated within
+ # a [Binaries] section. A binary filename may appear in
+ # multiple architectural [Binaries] sections. A binary
+ # filename listed in an architectural [Binaries] section
+ # must not be listed in the common architectural
# [Binaries] section.
- #
+ #
# NOTE: This check will not report error now.
- #
+ #
# for Item in self.Binaries:
# if Item.GetFileName() == InfBianryCommonItemObj.GetFileName():
# ItemSupArchList = Item.GetSupArchList()
@@ -537,7 +537,7 @@ class InfBinariesObject(InfSectionCommonDef):
for ArchItem in ArchList:
#
# Validate Arch
- #
+ #
if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
@@ -546,7 +546,7 @@ class InfBinariesObject(InfSectionCommonDef):
if len(UiInf) > 0:
#
# Check UI
- #
+ #
for UiItem in UiInf:
IsValidFileFlag = False
InfBianryUiItemObj = None
@@ -630,7 +630,7 @@ class InfBinariesObject(InfSectionCommonDef):
Line=UiCurrentLine.GetLineNo(),
ExtraData=UiCurrentLine.GetLineString())
#
- # Validate Feature Flag Express
+ # Validate Feature Flag Express
#
FeatureFlagRtv = IsValidFeatureFlagExp(UiContent[3].strip())
if not FeatureFlagRtv[0]:
@@ -647,15 +647,15 @@ class InfBinariesObject(InfSectionCommonDef):
#
# Determine binary file name duplicate. Follow below rule:
#
- # A binary filename must not be duplicated within
- # a [Binaries] section. A binary filename may appear in
- # multiple architectural [Binaries] sections. A binary
- # filename listed in an architectural [Binaries] section
- # must not be listed in the common architectural
+ # A binary filename must not be duplicated within
+ # a [Binaries] section. A binary filename may appear in
+ # multiple architectural [Binaries] sections. A binary
+ # filename listed in an architectural [Binaries] section
+ # must not be listed in the common architectural
# [Binaries] section.
- #
+ #
# NOTE: This check will not report error now.
- #
+ #
# for Item in self.Binaries:
# if Item.GetFileName() == InfBianryUiItemObj.GetFileName():
# ItemSupArchList = Item.GetSupArchList()
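The repeated comment block above states the [Binaries] duplication rule: a binary filename must not repeat inside one [Binaries] section, may appear in several architecture-specific sections, but must not be listed in both an architecture-specific section and the common section. A standalone sketch of that rule follows; the helper name and the dict layout are invented for illustration, and, as the NOTE says, the parser itself does not yet report this as an error:

# Hypothetical check for the [Binaries] duplication rule described above.
# 'entries' maps a section arch tag (e.g. 'IA32', 'X64', 'COMMON') to the
# list of binary file names declared in that section.
def find_binaries_rule_violations(entries):
    violations = []
    common = set(name.lower() for name in entries.get('COMMON', []))
    for arch, names in entries.items():
        seen = set()
        for name in names:
            key = name.lower()
            if key in seen:
                violations.append((arch, name, 'duplicated within one section'))
            seen.add(key)
            if arch != 'COMMON' and key in common:
                violations.append((arch, name, 'also listed in the common section'))
    return violations

example = {'COMMON': ['Driver.efi'],
           'IA32': ['Driver.efi', 'Setup.bin'],
           'X64': ['Setup.bin']}          # Setup.bin in two arch sections is allowed
for arch, name, reason in find_binaries_rule_violations(example):
    print(arch, name, reason)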
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfBuildOptionObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfBuildOptionObject.py
index 391669fe64..6011fbf523 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfBuildOptionObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfBuildOptionObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file [BuildOptions] section.
-# It will consumed by InfParser.
+# This file is used to define class objects of INF file [BuildOptions] section.
+# It will be consumed by InfParser.
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -16,7 +16,7 @@
InfBuildOptionObject
'''
-from Library import GlobalData
+from Library import GlobalData
from Object.Parser.InfCommonObject import InfSectionCommonDef
@@ -25,17 +25,17 @@ class InfBuildOptionItem():
self.Content = ''
self.SupArchList = []
self.AsBuildList = []
-
+
def SetContent(self, Content):
self.Content = Content
def GetContent(self):
return self.Content
-
+
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
-
+
#
# AsBuild Information
#
@@ -43,12 +43,12 @@ class InfBuildOptionItem():
self.AsBuildList = AsBuildList
def GetAsBuildList(self):
return self.AsBuildList
-
-
+
+
## INF BuildOption section
# Macro define is not permitted for this section.
#
-#
+#
class InfBuildOptionsObject(InfSectionCommonDef):
def __init__(self):
self.BuildOptions = []
@@ -56,38 +56,38 @@ class InfBuildOptionsObject(InfSectionCommonDef):
## SetBuildOptions function
#
# For BuildOptionName, need to validate it's format
- # For BuildOptionValue, just ignore it.
+ # For BuildOptionValue, just ignore it.
#
# @param Arch Indicated which arch of build options belong to.
# @param BuildOptCont A list contain BuildOption related information.
# The element in the list contain 3 members.
# BuildOptionName, BuildOptionValue and IsReplace
# flag.
- #
+ #
# @return True Build options set/validate successfully
# @return False Build options set/validate failed
#
def SetBuildOptions(self, BuildOptCont, ArchList = None, SectionContent = ''):
- if not GlobalData.gIS_BINARY_INF:
-
+ if not GlobalData.gIS_BINARY_INF:
+
if SectionContent.strip() != '':
InfBuildOptionItemObj = InfBuildOptionItem()
InfBuildOptionItemObj.SetContent(SectionContent)
InfBuildOptionItemObj.SetSupArchList(ArchList)
-
+
self.BuildOptions.append(InfBuildOptionItemObj)
else:
#
- # For AsBuild INF file
+ # For AsBuild INF file
#
if len(BuildOptCont) >= 1:
InfBuildOptionItemObj = InfBuildOptionItem()
InfBuildOptionItemObj.SetAsBuildList(BuildOptCont)
InfBuildOptionItemObj.SetSupArchList(ArchList)
self.BuildOptions.append(InfBuildOptionItemObj)
-
+
return True
-
+
def GetBuildOptions(self):
- return self.BuildOptions \ No newline at end of file
+ return self.BuildOptions
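SetBuildOptions above takes two paths: for a source INF it wraps the raw section text in a single item, while for an as-built (binary) INF it stores the parsed option list; both record the arch list. A rough standalone sketch of that branching, with the GlobalData.gIS_BINARY_INF flag replaced by an explicit parameter so the snippet stays self-contained:

# Simplified stand-in for InfBuildOptionItem / SetBuildOptions; illustrative only.
class BuildOptionItem:
    def __init__(self):
        self.Content = ''
        self.SupArchList = []
        self.AsBuildList = []

def set_build_options(items, build_opt_cont, arch_list, section_content='',
                      is_binary_inf=False):
    if not is_binary_inf:
        if section_content.strip():
            item = BuildOptionItem()
            item.Content = section_content      # keep the raw section text
            item.SupArchList = arch_list
            items.append(item)
    elif build_opt_cont:
        item = BuildOptionItem()
        item.AsBuildList = build_opt_cont       # keep the as-built option list
        item.SupArchList = arch_list
        items.append(item)
    return True

options = []
set_build_options(options, [], ['IA32'], section_content='MSFT:*_*_*_CC_FLAGS = /Od')
set_build_options(options, ['GCC:*_*_*_CC_FLAGS = -Os'], ['X64'], is_binary_inf=True)
print(len(options))   # 2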
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfCommonObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfCommonObject.py
index 217b0941da..49be7d9f65 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfCommonObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfCommonObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define common class objects for INF file.
+# This file is used to define common class objects for INF file.
# It will consumed by InfParser
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,9 +17,9 @@ InfCommonObject
'''
## InfLineCommentObject
-#
-# Comment Object for any line in the INF file
-#
+#
+# Comment Object for any line in the INF file
+#
# #
# # HeaderComment
# #
@@ -29,21 +29,21 @@ class InfLineCommentObject():
def __init__(self):
self.HeaderComments = ''
self.TailComments = ''
-
+
def SetHeaderComments(self, HeaderComments):
self.HeaderComments = HeaderComments
-
+
def GetHeaderComments(self):
return self.HeaderComments
-
+
def SetTailComments(self, TailComments):
self.TailComments = TailComments
def GetTailComments(self):
- return self.TailComments
-
+ return self.TailComments
+
## CurrentLine
-#
+#
class CurrentLine():
def __init__(self):
self.LineNo = ''
@@ -51,48 +51,48 @@ class CurrentLine():
self.FileName = ''
## SetLineNo
- #
- # @param LineNo: LineNo
- #
+ #
+ # @param LineNo: LineNo
+ #
def SetLineNo(self, LineNo):
self.LineNo = LineNo
-
+
## GetLineNo
- #
+ #
def GetLineNo(self):
return self.LineNo
## SetLineString
- #
- # @param LineString: Line String content
- #
+ #
+ # @param LineString: Line String content
+ #
def SetLineString(self, LineString):
self.LineString = LineString
-
+
## GetLineString
- #
+ #
def GetLineString(self):
return self.LineString
## SetFileName
- #
+ #
# @param FileName: File Name
- #
+ #
def SetFileName(self, FileName):
self.FileName = FileName
-
+
## GetFileName
- #
+ #
def GetFileName(self):
return self.FileName
-
-##
+
+##
# Inf Section common data
#
class InfSectionCommonDef():
def __init__(self):
#
- # #
+ # #
# # HeaderComments at here
# #
# [xxSection] TailComments at here
@@ -104,7 +104,7 @@ class InfSectionCommonDef():
# The support arch list of this section
#
self.SupArchList = []
-
+
#
# Store all section content
# Key is supported Arch
@@ -112,51 +112,51 @@ class InfSectionCommonDef():
self.AllContent = {}
## SetHeaderComments
- #
+ #
# @param HeaderComments: HeaderComments
- #
+ #
def SetHeaderComments(self, HeaderComments):
self.HeaderComments = HeaderComments
## GetHeaderComments
- #
+ #
def GetHeaderComments(self):
return self.HeaderComments
## SetTailComments
- #
+ #
# @param TailComments: TailComments
- #
+ #
def SetTailComments(self, TailComments):
self.TailComments = TailComments
## GetTailComments
- #
+ #
def GetTailComments(self):
return self.TailComments
## SetSupArchList
- #
+ #
# @param Arch: Arch
- #
+ #
def SetSupArchList(self, Arch):
if Arch not in self.SupArchList:
self.SupArchList.append(Arch)
## GetSupArchList
- #
+ #
def GetSupArchList(self):
return self.SupArchList
## SetAllContent
- #
+ #
# @param ArchList: ArchList
# @param Content: Content
- #
+ #
def SetAllContent(self, Content):
self.AllContent = Content
-
+
## GetAllContent
- #
+ #
def GetAllContent(self):
return self.AllContent
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfDefineCommonObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfDefineCommonObject.py
index d565e65a65..ded7965fe7 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfDefineCommonObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfDefineCommonObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define common class objects of [Defines] section for INF file.
+# This file is used to define common class objects of [Defines] section for INF file.
# It will consumed by InfParser
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -19,13 +19,13 @@ InfDefineCommonObject
from Object.Parser.InfCommonObject import InfLineCommentObject
## InfDefineImageExeParamItem
-#
+#
class InfDefineImageExeParamItem():
def __init__(self):
self.CName = ''
self.FeatureFlagExp = ''
self.Comments = InfLineCommentObject()
-
+
def SetCName(self, CName):
self.CName = CName
def GetCName(self):
@@ -36,49 +36,49 @@ class InfDefineImageExeParamItem():
return self.FeatureFlagExp
## InfDefineEntryPointItem
-#
+#
class InfDefineEntryPointItem(InfDefineImageExeParamItem):
def __init__(self):
InfDefineImageExeParamItem.__init__(self)
## InfDefineUnloadImageItem
-#
+#
class InfDefineUnloadImageItem(InfDefineImageExeParamItem):
def __init__(self):
InfDefineImageExeParamItem.__init__(self)
## InfDefineConstructorItem
-#
+#
class InfDefineConstructorItem(InfDefineImageExeParamItem):
def __init__(self):
InfDefineImageExeParamItem.__init__(self)
self.SupModList = []
-
+
def SetSupModList(self, SupModList):
self.SupModList = SupModList
def GetSupModList(self):
return self.SupModList
## InfDefineDestructorItem
-#
+#
class InfDefineDestructorItem(InfDefineImageExeParamItem):
def __init__(self):
InfDefineImageExeParamItem.__init__(self)
self.SupModList = []
-
+
def SetSupModList(self, SupModList):
self.SupModList = SupModList
def GetSupModList(self):
return self.SupModList
-
+
## InfDefineLibraryItem
-#
+#
class InfDefineLibraryItem():
def __init__(self):
self.LibraryName = ''
self.Types = []
self.Comments = InfLineCommentObject()
-
+
def SetLibraryName(self, Name):
self.LibraryName = Name
def GetLibraryName(self):
@@ -86,4 +86,4 @@ class InfDefineLibraryItem():
def SetTypes(self, Type):
self.Types = Type
def GetTypes(self):
- return self.Types \ No newline at end of file
+ return self.Types
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py
index 9d27a92cd6..8998d69889 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of [Defines] section for INF file.
+# This file is used to define class objects of [Defines] section for INF file.
# It will consumed by InfParser
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -21,21 +21,21 @@ import re
from Logger import StringTable as ST
from Logger import ToolError
-from Library import GlobalData
+from Library import GlobalData
from Library import DataType as DT
from Library.StringUtils import GetSplitValueList
from Library.Misc import CheckGuidRegFormat
from Library.Misc import Sdict
from Library.Misc import ConvPathFromAbsToRel
from Library.Misc import ValidateUNIFilePath
-from Library.ExpressionValidate import IsValidFeatureFlagExp
+from Library.ExpressionValidate import IsValidFeatureFlagExp
from Library.ParserValidate import IsValidWord
-from Library.ParserValidate import IsValidInfMoudleType
-from Library.ParserValidate import IsValidHex
-from Library.ParserValidate import IsValidHexVersion
-from Library.ParserValidate import IsValidDecVersion
-from Library.ParserValidate import IsValidCVariableName
-from Library.ParserValidate import IsValidBoolType
+from Library.ParserValidate import IsValidInfMoudleType
+from Library.ParserValidate import IsValidHex
+from Library.ParserValidate import IsValidHexVersion
+from Library.ParserValidate import IsValidDecVersion
+from Library.ParserValidate import IsValidCVariableName
+from Library.ParserValidate import IsValidBoolType
from Library.ParserValidate import IsValidPath
from Library.ParserValidate import IsValidFamily
from Library.ParserValidate import IsValidIdentifier
@@ -47,7 +47,7 @@ from Object.Parser.InfMisc import ErrorInInf
from Object.Parser.InfDefineCommonObject import InfDefineLibraryItem
from Object.Parser.InfDefineCommonObject import InfDefineEntryPointItem
from Object.Parser.InfDefineCommonObject import InfDefineUnloadImageItem
-from Object.Parser.InfDefineCommonObject import InfDefineConstructorItem
+from Object.Parser.InfDefineCommonObject import InfDefineConstructorItem
from Object.Parser.InfDefineCommonObject import InfDefineDestructorItem
class InfDefSectionOptionRomInfo():
@@ -65,9 +65,9 @@ class InfDefSectionOptionRomInfo():
if self.PciVendorId is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_VENDOR_ID),
LineInfo=self.CurrentLine)
- return False
+ return False
#
- # The PciVendorId should be hex string.
+ # The PciVendorId should be hex string.
#
if (IsValidHex(PciVendorId)):
self.PciVendorId = InfDefMember()
@@ -78,10 +78,10 @@ class InfDefSectionOptionRomInfo():
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PciVendorId),
LineInfo=self.CurrentLine)
return False
-
+
def GetPciVendorId(self):
return self.PciVendorId
-
+
def SetPciDeviceId(self, PciDeviceId, Comments):
#
# Value has been set before.
@@ -89,23 +89,23 @@ class InfDefSectionOptionRomInfo():
if self.PciDeviceId is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_DEVICE_ID),
LineInfo=self.CurrentLine)
- return False
+ return False
#
- # The PciDeviceId should be hex string.
+ # The PciDeviceId should be hex string.
#
if (IsValidHex(PciDeviceId)):
self.PciDeviceId = InfDefMember()
self.PciDeviceId.SetValue(PciDeviceId)
- self.PciDeviceId.Comments = Comments
+ self.PciDeviceId.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PciDeviceId),
LineInfo=self.CurrentLine)
return False
-
+
def GetPciDeviceId(self):
return self.PciDeviceId
-
+
def SetPciClassCode(self, PciClassCode, Comments):
#
# Value has been set before.
@@ -113,9 +113,9 @@ class InfDefSectionOptionRomInfo():
if self.PciClassCode is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_CLASS_CODE),
LineInfo=self.CurrentLine)
- return False
+ return False
#
- # The PciClassCode should be 4 bytes hex string.
+ # The PciClassCode should be 4 bytes hex string.
#
if (IsValidHex(PciClassCode)):
self.PciClassCode = InfDefMember()
@@ -127,10 +127,10 @@ class InfDefSectionOptionRomInfo():
(PciClassCode),
LineInfo=self.CurrentLine)
return False
-
+
def GetPciClassCode(self):
return self.PciClassCode
-
+
def SetPciRevision(self, PciRevision, Comments):
#
# Value has been set before.
@@ -138,9 +138,9 @@ class InfDefSectionOptionRomInfo():
if self.PciRevision is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_REVISION),
LineInfo=self.CurrentLine)
- return False
+ return False
#
- # The PciRevision should be 4 bytes hex string.
+ # The PciRevision should be 4 bytes hex string.
#
if (IsValidHex(PciRevision)):
self.PciRevision = InfDefMember()
@@ -151,10 +151,10 @@ class InfDefSectionOptionRomInfo():
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PciRevision),
LineInfo=self.CurrentLine)
return False
-
+
def GetPciRevision(self):
return self.PciRevision
-
+
def SetPciCompress(self, PciCompress, Comments):
#
# Value has been set before.
@@ -163,9 +163,9 @@ class InfDefSectionOptionRomInfo():
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_COMPRESS),
LineInfo=self.CurrentLine)
return False
-
+
#
- # The PciCompress should be 'TRUE' or 'FALSE'.
+ # The PciCompress should be 'TRUE' or 'FALSE'.
#
if (PciCompress == 'TRUE' or PciCompress == 'FALSE'):
self.PciCompress = InfDefMember()
@@ -175,9 +175,9 @@ class InfDefSectionOptionRomInfo():
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PciCompress),
LineInfo=self.CurrentLine)
- return False
+ return False
def GetPciCompress(self):
- return self.PciCompress
+ return self.PciCompress
##
# INF [Define] section Object
#
@@ -210,15 +210,15 @@ class InfDefSection(InfDefSectionOptionRomInfo):
## SetHeadComment
#
# @param BaseName: BaseName
- #
+ #
def SetBaseName(self, BaseName, Comments):
#
# Value has been set before.
#
- if self.BaseName is not None:
+ if self.BaseName is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_BASE_NAME),
LineInfo=self.CurrentLine)
- return False
+ return False
if not (BaseName == '' or BaseName is None):
if IsValidWord(BaseName) and not BaseName.startswith("_"):
self.BaseName = InfDefMember()
@@ -231,26 +231,26 @@ class InfDefSection(InfDefSectionOptionRomInfo):
return False
## GetBaseName
- #
+ #
def GetBaseName(self):
return self.BaseName
## SetFileGuid
#
# @param FileGuid: FileGuid
- #
+ #
def SetFileGuid(self, FileGuid, Comments):
#
# Value has been set before.
#
- if self.FileGuid is not None:
+ if self.FileGuid is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_FILE_GUID),
LineInfo=self.CurrentLine)
- return False
+ return False
#
# Do verification of GUID content/format
- #
+ #
if (CheckGuidRegFormat(FileGuid)):
self.FileGuid = InfDefMember()
self.FileGuid.SetValue(FileGuid)
@@ -262,23 +262,23 @@ class InfDefSection(InfDefSectionOptionRomInfo):
return False
## GetFileGuid
- #
+ #
def GetFileGuid(self):
return self.FileGuid
## SetModuleType
#
# @param ModuleType: ModuleType
- #
+ #
def SetModuleType(self, ModuleType, Comments):
#
# Value has been set before.
#
- if self.ModuleType is not None:
+ if self.ModuleType is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_MODULE_TYPE),
LineInfo=self.CurrentLine)
- return False
+ return False
#
# Valid Module Type or not
#
@@ -289,7 +289,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
self.ModuleType.CurrentLine.SetLineNo(self.CurrentLine[1])
self.ModuleType.CurrentLine.SetLineString(self.CurrentLine[2])
self.ModuleType.CurrentLine.SetFileName(self.CurrentLine[0])
- self.ModuleType.Comments = Comments
+ self.ModuleType.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID%\
@@ -298,14 +298,14 @@ class InfDefSection(InfDefSectionOptionRomInfo):
return False
## GetModuleType
- #
+ #
def GetModuleType(self):
return self.ModuleType
-
+
## SetModuleUniFileName
#
# @param ModuleUniFileName: ModuleUniFileName
- #
+ #
def SetModuleUniFileName(self, ModuleUniFileName, Comments):
if Comments:
pass
@@ -315,25 +315,25 @@ class InfDefSection(InfDefSectionOptionRomInfo):
self.ModuleUniFileName = ModuleUniFileName
## GetModuleType
- #
+ #
def GetModuleUniFileName(self):
return self.ModuleUniFileName
-
+
## SetInfVersion
#
# @param InfVersion: InfVersion
- #
+ #
def SetInfVersion(self, InfVersion, Comments):
#
# Value has been set before.
#
- if self.InfVersion is not None:
+ if self.InfVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_INF_VERSION),
LineInfo=self.CurrentLine)
- return False
+ return False
#
- # The InfVersion should be 4 bytes hex string.
+ # The InfVersion should be 4 bytes hex string.
#
if (IsValidHex(InfVersion)):
if (InfVersion < '0x00010005'):
@@ -356,25 +356,25 @@ class InfDefSection(InfDefSectionOptionRomInfo):
return True
## GetInfVersion
- #
+ #
def GetInfVersion(self):
return self.InfVersion
## SetEdkReleaseVersion
#
# @param EdkReleaseVersion: EdkReleaseVersion
- #
+ #
def SetEdkReleaseVersion(self, EdkReleaseVersion, Comments):
#
# Value has been set before.
#
- if self.EdkReleaseVersion is not None:
+ if self.EdkReleaseVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION),
LineInfo=self.CurrentLine)
- return False
+ return False
#
- # The EdkReleaseVersion should be 4 bytes hex string.
+ # The EdkReleaseVersion should be 4 bytes hex string.
#
if IsValidHexVersion(EdkReleaseVersion) or \
IsValidDecVersionVal(EdkReleaseVersion):
@@ -389,25 +389,25 @@ class InfDefSection(InfDefSectionOptionRomInfo):
return False
## GetEdkReleaseVersion
- #
+ #
def GetEdkReleaseVersion(self):
- return self.EdkReleaseVersion
+ return self.EdkReleaseVersion
## SetUefiSpecificationVersion
#
# @param UefiSpecificationVersion: UefiSpecificationVersion
- #
+ #
def SetUefiSpecificationVersion(self, UefiSpecificationVersion, Comments):
#
# Value has been set before.
#
- if self.UefiSpecificationVersion is not None:
+ if self.UefiSpecificationVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION),
LineInfo=self.CurrentLine)
- return False
+ return False
#
- # The EdkReleaseVersion should be 4 bytes hex string.
+ # The EdkReleaseVersion should be 4 bytes hex string.
#
if IsValidHexVersion(UefiSpecificationVersion) or \
IsValidDecVersionVal(UefiSpecificationVersion):
@@ -422,25 +422,25 @@ class InfDefSection(InfDefSectionOptionRomInfo):
return False
## GetUefiSpecificationVersion
- #
+ #
def GetUefiSpecificationVersion(self):
- return self.UefiSpecificationVersion
+ return self.UefiSpecificationVersion
## SetPiSpecificationVersion
#
# @param PiSpecificationVersion: PiSpecificationVersion
- #
+ #
def SetPiSpecificationVersion(self, PiSpecificationVersion, Comments):
#
# Value has been set before.
#
- if self.PiSpecificationVersion is not None:
+ if self.PiSpecificationVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_PI_SPECIFICATION_VERSION),
LineInfo=self.CurrentLine)
- return False
+ return False
#
- # The EdkReleaseVersion should be 4 bytes hex string.
+ # The EdkReleaseVersion should be 4 bytes hex string.
#
if IsValidHexVersion(PiSpecificationVersion) or \
IsValidDecVersionVal(PiSpecificationVersion):
@@ -451,24 +451,24 @@ class InfDefSection(InfDefSectionOptionRomInfo):
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID\
%(PiSpecificationVersion),
- LineInfo=self.CurrentLine)
+ LineInfo=self.CurrentLine)
return False
## GetPiSpecificationVersion
- #
+ #
def GetPiSpecificationVersion(self):
return self.PiSpecificationVersion
## SetLibraryClass
#
# @param LibraryClass: LibraryClass
- #
+ #
def SetLibraryClass(self, LibraryClass, Comments):
ValueList = GetSplitValueList(LibraryClass)
- Name = ValueList[0]
+ Name = ValueList[0]
if IsValidWord(Name):
InfDefineLibraryItemObj = InfDefineLibraryItem()
- InfDefineLibraryItemObj.SetLibraryName(Name)
+ InfDefineLibraryItemObj.SetLibraryName(Name)
InfDefineLibraryItemObj.Comments = Comments
if len(ValueList) == 2:
Type = ValueList[1]
@@ -479,23 +479,23 @@ class InfDefSection(InfDefSectionOptionRomInfo):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Item),
LineInfo=self.CurrentLine)
return False
- InfDefineLibraryItemObj.SetTypes(TypeList)
- self.LibraryClass.append(InfDefineLibraryItemObj)
+ InfDefineLibraryItemObj.SetTypes(TypeList)
+ self.LibraryClass.append(InfDefineLibraryItemObj)
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Name),
LineInfo=self.CurrentLine)
return False
-
+
return True
-
+
def GetLibraryClass(self):
return self.LibraryClass
-
+
def SetVersionString(self, VersionString, Comments):
#
# Value has been set before.
#
- if self.VersionString is not None:
+ if self.VersionString is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_VERSION_STRING),
LineInfo=self.CurrentLine)
@@ -503,25 +503,25 @@ class InfDefSection(InfDefSectionOptionRomInfo):
if not IsValidDecVersion(VersionString):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID\
%(VersionString),
- LineInfo=self.CurrentLine)
+ LineInfo=self.CurrentLine)
self.VersionString = InfDefMember()
self.VersionString.SetValue(VersionString)
self.VersionString.Comments = Comments
return True
-
+
def GetVersionString(self):
return self.VersionString
-
+
def SetPcdIsDriver(self, PcdIsDriver, Comments):
#
# Value has been set before.
#
- if self.PcdIsDriver is not None:
+ if self.PcdIsDriver is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_PCD_IS_DRIVER),
LineInfo=self.CurrentLine)
- return False
+ return False
if PcdIsDriver == 'PEI_PCD_DRIVER' or PcdIsDriver == 'DXE_PCD_DRIVER':
self.PcdIsDriver = InfDefMember()
self.PcdIsDriver.SetValue(PcdIsDriver)
@@ -531,20 +531,20 @@ class InfDefSection(InfDefSectionOptionRomInfo):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PcdIsDriver),
LineInfo=self.CurrentLine)
return False
-
+
def GetPcdIsDriver(self):
return self.PcdIsDriver
-
+
#
# SetEntryPoint
- #
+ #
def SetEntryPoint(self, EntryPoint, Comments):
#
# It can be a list
#
ValueList = []
TokenList = GetSplitValueList(EntryPoint, DT.TAB_VALUE_SPLIT)
- ValueList[0:len(TokenList)] = TokenList
+ ValueList[0:len(TokenList)] = TokenList
InfDefineEntryPointItemObj = InfDefineEntryPointItem()
if not IsValidCVariableName(ValueList[0]):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%\
@@ -557,33 +557,33 @@ class InfDefSection(InfDefSectionOptionRomInfo):
(ValueList[1]),
LineInfo=self.CurrentLine)
#
- # Validate FFE
+ # Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(ValueList[1].strip())
if not FeatureFlagRtv[0]:
ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%\
(FeatureFlagRtv[1]),
- LineInfo=self.CurrentLine)
- InfDefineEntryPointItemObj.SetFeatureFlagExp(ValueList[1])
+ LineInfo=self.CurrentLine)
+ InfDefineEntryPointItemObj.SetFeatureFlagExp(ValueList[1])
if len(ValueList) > 2:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(EntryPoint),
LineInfo=self.CurrentLine)
- InfDefineEntryPointItemObj.Comments = Comments
- self.EntryPoint.append(InfDefineEntryPointItemObj)
-
+ InfDefineEntryPointItemObj.Comments = Comments
+ self.EntryPoint.append(InfDefineEntryPointItemObj)
+
def GetEntryPoint(self):
return self.EntryPoint
#
# SetUnloadImages
- #
+ #
def SetUnloadImages(self, UnloadImages, Comments):
#
# It can be a list
#
ValueList = []
TokenList = GetSplitValueList(UnloadImages, DT.TAB_VALUE_SPLIT)
- ValueList[0:len(TokenList)] = TokenList
+ ValueList[0:len(TokenList)] = TokenList
InfDefineUnloadImageItemObj = InfDefineUnloadImageItem()
if not IsValidCVariableName(ValueList[0]):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[0]),
@@ -594,33 +594,33 @@ class InfDefSection(InfDefSectionOptionRomInfo):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[1]),
LineInfo=self.CurrentLine)
#
- # Validate FFE
+ # Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(ValueList[1].strip())
if not FeatureFlagRtv[0]:
ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
LineInfo=self.CurrentLine)
InfDefineUnloadImageItemObj.SetFeatureFlagExp(ValueList[1])
-
+
if len(ValueList) > 2:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(UnloadImages),
LineInfo=self.CurrentLine)
InfDefineUnloadImageItemObj.Comments = Comments
self.UnloadImages.append(InfDefineUnloadImageItemObj)
-
+
def GetUnloadImages(self):
return self.UnloadImages
#
# SetConstructor
- #
+ #
def SetConstructor(self, Constructor, Comments):
#
# It can be a list
#
ValueList = []
TokenList = GetSplitValueList(Constructor, DT.TAB_VALUE_SPLIT)
- ValueList[0:len(TokenList)] = TokenList
+ ValueList[0:len(TokenList)] = TokenList
InfDefineConstructorItemObj = InfDefineConstructorItem()
if not IsValidCVariableName(ValueList[0]):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[0]),
@@ -635,39 +635,39 @@ class InfDefSection(InfDefSectionOptionRomInfo):
if ModItem not in DT.MODULE_LIST:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID%(ModItem),
LineInfo=self.CurrentLine)
- InfDefineConstructorItemObj.SetSupModList(ModList)
+ InfDefineConstructorItemObj.SetSupModList(ModList)
if len(ValueList) == 3:
if ValueList[2].strip() == '':
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[2]),
LineInfo=self.CurrentLine)
#
- # Validate FFE
+ # Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(ValueList[2].strip())
if not FeatureFlagRtv[0]:
ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[2]),
LineInfo=self.CurrentLine)
InfDefineConstructorItemObj.SetFeatureFlagExp(ValueList[2])
-
+
if len(ValueList) > 3:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Constructor),
LineInfo=self.CurrentLine)
- InfDefineConstructorItemObj.Comments = Comments
- self.Constructor.append(InfDefineConstructorItemObj)
-
+ InfDefineConstructorItemObj.Comments = Comments
+ self.Constructor.append(InfDefineConstructorItemObj)
+
def GetConstructor(self):
return self.Constructor
#
# SetDestructor
- #
+ #
def SetDestructor(self, Destructor, Comments):
#
# It can be a list and only 1 set to TRUE
- #
+ #
ValueList = []
TokenList = GetSplitValueList(Destructor, DT.TAB_VALUE_SPLIT)
- ValueList[0:len(TokenList)] = TokenList
+ ValueList[0:len(TokenList)] = TokenList
InfDefineDestructorItemObj = InfDefineDestructorItem()
if not IsValidCVariableName(ValueList[0]):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[0]),
@@ -682,30 +682,30 @@ class InfDefSection(InfDefSectionOptionRomInfo):
if ModItem not in DT.MODULE_LIST:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID%(ModItem),
LineInfo=self.CurrentLine)
- InfDefineDestructorItemObj.SetSupModList(ModList)
+ InfDefineDestructorItemObj.SetSupModList(ModList)
if len(ValueList) == 3:
if ValueList[2].strip() == '':
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[2]),
LineInfo=self.CurrentLine)
#
- # Validate FFE
+ # Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(ValueList[2].strip())
if not FeatureFlagRtv[0]:
ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
LineInfo=self.CurrentLine)
InfDefineDestructorItemObj.SetFeatureFlagExp(ValueList[2])
-
+
if len(ValueList) > 3:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Destructor),
LineInfo=self.CurrentLine)
-
- InfDefineDestructorItemObj.Comments = Comments
- self.Destructor.append(InfDefineDestructorItemObj)
-
+
+ InfDefineDestructorItemObj.Comments = Comments
+ self.Destructor.append(InfDefineDestructorItemObj)
+
def GetDestructor(self):
return self.Destructor
-
+
def SetShadow(self, Shadow, Comments):
#
# Value has been set before.
@@ -713,7 +713,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
if self.Shadow is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_SHADOW),
LineInfo=self.CurrentLine)
- return False
+ return False
if (IsValidBoolType(Shadow)):
self.Shadow = InfDefMember()
self.Shadow.SetValue(Shadow)
@@ -751,22 +751,22 @@ class InfDefSection(InfDefSectionOptionRomInfo):
IsValidFileFlag = False
ModulePath = os.path.split(self.CurrentLine[0])[0]
if IsValidPath(FileName, ModulePath):
- IsValidFileFlag = True
+ IsValidFileFlag = True
else:
ErrorInInf(ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(FileName),
LineInfo=self.CurrentLine)
return False
if IsValidFileFlag:
- FileName = ConvPathFromAbsToRel(FileName, GlobalData.gINF_MODULE_DIR)
+ FileName = ConvPathFromAbsToRel(FileName, GlobalData.gINF_MODULE_DIR)
self.CustomMakefile.append((Family, FileName, Comments))
IsValidFileFlag = False
return True
else:
return False
-
+
def GetCustomMakefile(self):
return self.CustomMakefile
-
+
#
# ["SPEC" <Spec> <EOL>]*{0,}
# <Spec> ::= <Word> "=" <VersionVal>
@@ -785,7 +785,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_NO_NAME + ' Or ' + ST.ERR_INF_PARSER_DEFINE_ITEM_NO_VALUE,
LineInfo=self.CurrentLine)
Name = __ValueList[0].strip()
- Version = __ValueList[1].strip()
+ Version = __ValueList[1].strip()
if IsValidIdentifier(Name):
if IsValidDecVersion(Version):
self.Specification.append((Name, Version, Comments))
@@ -794,15 +794,15 @@ class InfDefSection(InfDefSectionOptionRomInfo):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Version),
LineInfo=self.CurrentLine)
return False
- else:
+ else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Name),
LineInfo=self.CurrentLine)
- return False
+ return False
return True
def GetSpecification(self):
return self.Specification
-
+
#
# [<UefiHiiResource> <EOL>]{0,1}
# <UefiHiiResource> ::= "UEFI_HII_RESOURCE_SECTION" "=" <BoolType>
@@ -815,12 +815,12 @@ class InfDefSection(InfDefSectionOptionRomInfo):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND
%(DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION),
LineInfo=self.CurrentLine)
- return False
+ return False
if not (UefiHiiResourceSection == '' or UefiHiiResourceSection is None):
if (IsValidBoolType(UefiHiiResourceSection)):
self.UefiHiiResourceSection = InfDefMember()
self.UefiHiiResourceSection.SetValue(UefiHiiResourceSection)
- self.UefiHiiResourceSection.Comments = Comments
+ self.UefiHiiResourceSection.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(UefiHiiResourceSection),
@@ -828,28 +828,28 @@ class InfDefSection(InfDefSectionOptionRomInfo):
return False
else:
return False
-
+
def GetUefiHiiResourceSection(self):
return self.UefiHiiResourceSection
-
- def SetDpxSource(self, DpxSource, Comments):
+
+ def SetDpxSource(self, DpxSource, Comments):
#
# The MakefileName specified file should exist
#
IsValidFileFlag = False
ModulePath = os.path.split(self.CurrentLine[0])[0]
if IsValidPath(DpxSource, ModulePath):
- IsValidFileFlag = True
+ IsValidFileFlag = True
else:
ErrorInInf(ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(DpxSource),
LineInfo=self.CurrentLine)
return False
if IsValidFileFlag:
- DpxSource = ConvPathFromAbsToRel(DpxSource,
- GlobalData.gINF_MODULE_DIR)
+ DpxSource = ConvPathFromAbsToRel(DpxSource,
+ GlobalData.gINF_MODULE_DIR)
self.DpxSource.append((DpxSource, Comments))
IsValidFileFlag = False
- return True
+ return True
def GetDpxSource(self):
return self.DpxSource
@@ -867,7 +867,7 @@ gFUNCTION_MAPPING_FOR_DEFINE_SECTION = {
DT.TAB_INF_DEFINES_INF_VERSION : InfDefSection.SetInfVersion,
#
# Optional Fields
- #
+ #
DT.TAB_INF_DEFINES_MODULE_UNI_FILE : InfDefSection.SetModuleUniFileName,
DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION : InfDefSection.SetEdkReleaseVersion,
DT.TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION : InfDefSection.SetUefiSpecificationVersion,
@@ -882,10 +882,10 @@ gFUNCTION_MAPPING_FOR_DEFINE_SECTION = {
DT.TAB_INF_DEFINES_SHADOW : InfDefSection.SetShadow,
DT.TAB_INF_DEFINES_PCI_VENDOR_ID : InfDefSection.SetPciVendorId,
DT.TAB_INF_DEFINES_PCI_DEVICE_ID : InfDefSection.SetPciDeviceId,
- DT.TAB_INF_DEFINES_PCI_CLASS_CODE : InfDefSection.SetPciClassCode,
+ DT.TAB_INF_DEFINES_PCI_CLASS_CODE : InfDefSection.SetPciClassCode,
DT.TAB_INF_DEFINES_PCI_REVISION : InfDefSection.SetPciRevision,
DT.TAB_INF_DEFINES_PCI_COMPRESS : InfDefSection.SetPciCompress,
- DT.TAB_INF_DEFINES_CUSTOM_MAKEFILE : InfDefSection.SetCustomMakefile,
+ DT.TAB_INF_DEFINES_CUSTOM_MAKEFILE : InfDefSection.SetCustomMakefile,
DT.TAB_INF_DEFINES_SPEC : InfDefSection.SetSpecification,
DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION : InfDefSection.SetUefiHiiResourceSection,
DT.TAB_INF_DEFINES_DPX_SOURCE : InfDefSection.SetDpxSource
@@ -908,7 +908,7 @@ class InfDefMember():
return self.Value
def SetValue(self, Value):
self.Value = Value
-
+
## InfDefObject
#
#
@@ -922,7 +922,7 @@ class InfDefObject(InfSectionCommonDef):
#
HasFoundInfVersionFalg = False
LineInfo = ['', -1, '']
- ArchListString = ' '.join(Arch)
+ ArchListString = ' '.join(Arch)
#
# Parse Define items.
#
@@ -947,7 +947,7 @@ class InfDefObject(InfSectionCommonDef):
ErrorCode=ToolError.EDK1_INF_ERROR,
RaiseError=True)
if Name == DT.TAB_INF_DEFINES_INF_VERSION:
- HasFoundInfVersionFalg = True
+ HasFoundInfVersionFalg = True
if not (Name == '' or Name is None):
#
# Process "SPEC" Keyword definition.
@@ -958,7 +958,7 @@ class InfDefObject(InfSectionCommonDef):
Name = "SPEC"
Value = SpecValue + " = " + Value
if ArchListString in self.Defines:
- DefineList = self.Defines[ArchListString]
+ DefineList = self.Defines[ArchListString]
LineInfo[0] = InfDefMemberObj.CurrentLine.GetFileName()
LineInfo[1] = InfDefMemberObj.CurrentLine.GetLineNo()
LineInfo[2] = InfDefMemberObj.CurrentLine.GetLineString()
@@ -968,7 +968,7 @@ class InfDefObject(InfSectionCommonDef):
#
if Name not in gFUNCTION_MAPPING_FOR_DEFINE_SECTION.keys():
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_SECTION_KEYWORD_INVALID%(Name),
- LineInfo=LineInfo)
+ LineInfo=LineInfo)
else:
ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[Name]
if (ProcessFunc is not None):
@@ -988,7 +988,7 @@ class InfDefObject(InfSectionCommonDef):
LineInfo=LineInfo)
#
# Found the process function from mapping table.
- #
+ #
else:
ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[Name]
if (ProcessFunc is not None):
@@ -1000,9 +1000,9 @@ class InfDefObject(InfSectionCommonDef):
if not HasFoundInfVersionFalg:
ErrorInInf(ST.ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF,
ErrorCode=ToolError.EDK1_INF_ERROR,
- RaiseError=True)
+ RaiseError=True)
return True
-
+
def GetDefines(self):
- return self.Defines
-
+ return self.Defines
+
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfDepexObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfDepexObject.py
index 55d6bbc69e..b8362dfbb5 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfDepexObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfDepexObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file [Depex] section.
-# It will consumed by InfParser.
+# This file is used to define class objects of INF file [Depex] section.
+# It will consumed by InfParser.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -99,7 +99,7 @@ class InfDepexObject(InfSectionCommonDef):
#
# Validate Arch
- #
+ #
if IsValidArch(Arch.strip().upper()):
InfDepexItemIns.SetSupArch(Arch)
else:
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py
index 4dfe75a2f1..52c03eed2a 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file [Guids] section.
-# It will consumed by InfParser.
+# This file is used to define class objects of INF file [Guids] section.
+# It will consumed by InfParser.
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -18,10 +18,10 @@ InfGuidObject
from Library.ParserValidate import IsValidCVariableName
from Library.CommentParsing import ParseComment
-from Library.ExpressionValidate import IsValidFeatureFlagExp
-
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+
from Library.Misc import Sdict
-from Library import DataType as DT
+from Library import DataType as DT
import Logger.Log as Logger
from Logger import ToolError
from Logger import StringTable as ST
@@ -29,7 +29,7 @@ from Logger import StringTable as ST
class InfGuidItemCommentContent():
def __init__(self):
#
- # ## SOMETIMES_CONSUMES ## Variable:L"MemoryTypeInformation"
+ # ## SOMETIMES_CONSUMES ## Variable:L"MemoryTypeInformation"
# TailString.
#
#
@@ -48,27 +48,27 @@ class InfGuidItemCommentContent():
# TailString
#
self.HelpStringItem = ''
-
+
def SetUsageItem(self, UsageItem):
self.UsageItem = UsageItem
def GetUsageItem(self):
return self.UsageItem
-
+
def SetGuidTypeItem(self, GuidTypeItem):
self.GuidTypeItem = GuidTypeItem
def GetGuidTypeItem(self):
return self.GuidTypeItem
-
+
def SetVariableNameItem(self, VariableNameItem):
self.VariableNameItem = VariableNameItem
def GetVariableNameItem(self):
return self.VariableNameItem
-
+
def SetHelpStringItem(self, HelpStringItem):
self.HelpStringItem = HelpStringItem
def GetHelpStringItem(self):
return self.HelpStringItem
-
+
class InfGuidItem():
def __init__(self):
self.Name = ''
@@ -78,22 +78,22 @@ class InfGuidItem():
#
self.CommentList = []
self.SupArchList = []
-
+
def SetName(self, Name):
self.Name = Name
def GetName(self):
return self.Name
-
+
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
-
+
def SetCommentList(self, CommentList):
self.CommentList = CommentList
def GetCommentList(self):
return self.CommentList
-
+
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
@@ -120,17 +120,17 @@ def ParseGuidComment(CommentsList, InfGuidItemObj):
CommentItemGuidType, \
CommentItemVarString, \
CommentItemHelpText = \
- ParseComment(CommentItem,
- DT.ALL_USAGE_TOKENS,
- DT.GUID_TYPE_TOKENS,
- [],
+ ParseComment(CommentItem,
+ DT.ALL_USAGE_TOKENS,
+ DT.GUID_TYPE_TOKENS,
+ [],
True)
-
+
if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentsList) and CommentItemUsage == CommentItemGuidType == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
-
+
if Count == len(CommentsList):
if BlockFlag == 1 or BlockFlag == 2:
if CommentItemUsage == CommentItemGuidType == DT.ITEM_UNDEFINED:
@@ -138,7 +138,7 @@ def ParseGuidComment(CommentsList, InfGuidItemObj):
else:
BlockFlag = 3
if BlockFlag == -1:
- BlockFlag = 4
+ BlockFlag = 4
if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
if CommentItemUsage == CommentItemGuidType == DT.ITEM_UNDEFINED:
if BlockFlag == -1:
@@ -150,15 +150,15 @@ def ParseGuidComment(CommentsList, InfGuidItemObj):
BlockFlag = 3
elif BlockFlag == -1:
BlockFlag = 4
-
+
#
# Combine two comment line if they are generic comment
- #
+ #
if CommentItemUsage == CommentItemGuidType == PreUsage == PreGuidType == DT.ITEM_UNDEFINED:
CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
PreHelpText = CommentItemHelpText
-
- if BlockFlag == 4:
+
+ if BlockFlag == 4:
CommentItemIns = InfGuidItemCommentContent()
CommentItemIns.SetUsageItem(CommentItemUsage)
CommentItemIns.SetGuidTypeItem(CommentItemGuidType)
@@ -167,16 +167,16 @@ def ParseGuidComment(CommentsList, InfGuidItemObj):
CommentItemHelpText = CommentItemHelpText.strip(DT.END_OF_LINE)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
-
+
BlockFlag = -1
PreUsage = None
PreGuidType = None
PreHelpText = ''
-
+
elif BlockFlag == 3:
#
# Add previous help string
- #
+ #
CommentItemIns = InfGuidItemCommentContent()
CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
CommentItemIns.SetGuidTypeItem(DT.ITEM_UNDEFINED)
@@ -195,17 +195,17 @@ def ParseGuidComment(CommentsList, InfGuidItemObj):
CommentItemHelpText = CommentItemHelpText.strip(DT.END_OF_LINE)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
-
+
BlockFlag = -1
PreUsage = None
PreGuidType = None
- PreHelpText = ''
-
+ PreHelpText = ''
+
else:
PreUsage = CommentItemUsage
PreGuidType = CommentItemGuidType
PreHelpText = CommentItemHelpText
-
+
InfGuidItemObj.SetCommentList(CommentInsList)
else:
#
@@ -215,7 +215,7 @@ def ParseGuidComment(CommentsList, InfGuidItemObj):
CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
CommentItemIns.SetGuidTypeItem(DT.ITEM_UNDEFINED)
InfGuidItemObj.SetCommentList([CommentItemIns])
-
+
return InfGuidItemObj
## InfGuidObject
@@ -229,18 +229,18 @@ class InfGuidObject():
# Macro defined in this section should be only used in this section.
#
self.Macros = {}
-
+
def SetGuid(self, GuidList, Arch = None):
__SupportArchList = []
for ArchItem in Arch:
#
# Validate Arch
- #
+ #
if (ArchItem == '' or ArchItem is None):
- ArchItem = 'COMMON'
-
+ ArchItem = 'COMMON'
+
__SupportArchList.append(ArchItem)
-
+
for Item in GuidList:
#
# Get Comment content of this protocol
@@ -250,77 +250,77 @@ class InfGuidObject():
CommentsList = Item[1]
CurrentLineOfItem = Item[2]
Item = Item[0]
- InfGuidItemObj = InfGuidItem()
+ InfGuidItemObj = InfGuidItem()
if len(Item) >= 1 and len(Item) <= 2:
#
# Only GuildName contained
#
if not IsValidCVariableName(Item[0]):
- Logger.Error("InfParser",
- ToolError.FORMAT_INVALID,
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_INVALID_CNAME%(Item[0]),
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
if (Item[0] != ''):
InfGuidItemObj.SetName(Item[0])
else:
- Logger.Error("InfParser",
- ToolError.FORMAT_INVALID,
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_CNAME_MISSING,
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
if len(Item) == 2:
#
# Contained CName and Feature Flag Express
# <statements> ::= <CName> ["|" <FeatureFlagExpress>]
- # For GUID entry.
+ # For GUID entry.
#
if Item[1].strip() == '':
- Logger.Error("InfParser",
- ToolError.FORMAT_INVALID,
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
#
- # Validate Feature Flag Express
+ # Validate Feature Flag Express
#
FeatureFlagRtv = IsValidFeatureFlagExp(Item[1].strip())
if not FeatureFlagRtv[0]:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
InfGuidItemObj.SetFeatureFlagExp(Item[1])
if len(Item) != 1 and len(Item) != 2:
#
- # Invalid format of GUID statement
+ # Invalid format of GUID statement
#
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR,
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
-
+
InfGuidItemObj = ParseGuidComment(CommentsList, InfGuidItemObj)
InfGuidItemObj.SetSupArchList(__SupportArchList)
-
+
#
# Determine GUID name duplicate. Follow below rule:
#
- # A GUID must not be duplicated within a [Guids] section.
- # A GUID may appear in multiple architectural [Guids]
- # sections. A GUID listed in an architectural [Guids]
- # section must not be listed in the common architectural
+ # A GUID must not be duplicated within a [Guids] section.
+ # A GUID may appear in multiple architectural [Guids]
+ # sections. A GUID listed in an architectural [Guids]
+ # section must not be listed in the common architectural
# [Guids] section.
- #
+ #
# NOTE: This check will not report error now.
- #
+ #
for Item in self.Guids:
if Item.GetName() == InfGuidItemObj.GetName():
ItemSupArchList = Item.GetSupArchList()
@@ -337,17 +337,17 @@ class InfGuidObject():
# ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
#
pass
-
+
if (InfGuidItemObj) in self.Guids:
- GuidList = self.Guids[InfGuidItemObj]
+ GuidList = self.Guids[InfGuidItemObj]
GuidList.append(InfGuidItemObj)
self.Guids[InfGuidItemObj] = GuidList
else:
GuidList = []
GuidList.append(InfGuidItemObj)
self.Guids[InfGuidItemObj] = GuidList
-
+
return True
-
+
def GetGuid(self):
return self.Guids
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py
index dce75063df..d2a8a4adbf 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file header.
-# It will consumed by InfParser.
+# This file is used to define class objects of INF file header.
+# It will consumed by InfParser.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,16 +17,16 @@ InfHeaderObject
'''
## INF file header object
-#
+#
# A sample file header
#
# ## @file xxx.inf FileName
# # Abstract
-# #
+# #
# # Description
# #
# # Copyright
-# #
+# #
# # License
# #
#
@@ -41,7 +41,7 @@ class InfHeaderObject():
## SetFileName
#
# @param FileName: File Name
- #
+ #
def SetFileName(self, FileName):
if not (FileName == '' or FileName is None):
self.FileName = FileName
@@ -50,14 +50,14 @@ class InfHeaderObject():
return False
## GetFileName
- #
+ #
def GetFileName(self):
return self.FileName
## SetAbstract
- #
+ #
# @param Abstract: Abstract
- #
+ #
def SetAbstract(self, Abstract):
if not (Abstract == '' or Abstract is None):
self.Abstract = Abstract
@@ -66,14 +66,14 @@ class InfHeaderObject():
return False
## GetAbstract
- #
+ #
def GetAbstract(self):
- return self.Abstract
+ return self.Abstract
## SetDescription
- #
- # @param Description: Description content
- #
+ #
+ # @param Description: Description content
+ #
def SetDescription(self, Description):
if not (Description == '' or Description is None):
self.Description = Description
@@ -82,14 +82,14 @@ class InfHeaderObject():
return False
## GetAbstract
- #
+ #
def GetDescription(self):
- return self.Description
+ return self.Description
## SetCopyright
- #
- # @param Copyright: Copyright content
- #
+ #
+ # @param Copyright: Copyright content
+ #
def SetCopyright(self, Copyright):
if not (Copyright == '' or Copyright is None):
self.Copyright = Copyright
@@ -98,14 +98,14 @@ class InfHeaderObject():
return False
## GetCopyright
- #
+ #
def GetCopyright(self):
- return self.Copyright
+ return self.Copyright
## SetCopyright
- #
- # @param License: License content
- #
+ #
+ # @param License: License content
+ #
def SetLicense(self, License):
if not (License == '' or License is None):
self.License = License
@@ -114,6 +114,6 @@ class InfHeaderObject():
return False
## GetLicense
- #
+ #
def GetLicense(self):
- return self.License
\ No newline at end of file
+ return self.License
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py
index 5de1832b71..82eca12395 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file [LibraryClasses] section.
-# It will consumed by InfParser.
+# This file is used to define class objects of INF file [LibraryClasses] section.
+# It will consumed by InfParser.
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -37,7 +37,7 @@ def GetArchModuleType(KeyList):
for (ArchItem, ModuleItem) in KeyList:
#
# Validate Arch
- #
+ #
if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
@@ -118,7 +118,7 @@ class InfLibraryClassObject():
##SetLibraryClasses
#
- #
+ #
# @param HelpString: It can be a common comment or contain a recommend
# instance.
#
@@ -173,7 +173,7 @@ class InfLibraryClassObject():
Line=LibItemObj.CurrentLine.GetLineNo(),
ExtraData=LibItemObj.CurrentLine.GetLineString())
#
- # Validate FFE
+ # Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(LibItem[1].strip())
if not FeatureFlagRtv[0]:
@@ -202,15 +202,15 @@ class InfLibraryClassObject():
#
# Determine Library class duplicate. Follow below rule:
#
- # A library class keyword must not be duplicated within a
- # [LibraryClasses] section. Library class keywords may appear in
- # multiple architectural and module type [LibraryClasses] sections.
- # A library class keyword listed in an architectural or module type
- # [LibraryClasses] section must not be listed in the common
+ # A library class keyword must not be duplicated within a
+ # [LibraryClasses] section. Library class keywords may appear in
+ # multiple architectural and module type [LibraryClasses] sections.
+ # A library class keyword listed in an architectural or module type
+ # [LibraryClasses] section must not be listed in the common
# architectural or module type [LibraryClasses] section.
- #
+ #
# NOTE: This check will not report error now. But keep code for future enhancement.
- #
+ #
# for Item in self.LibraryClasses:
# if Item.GetLibName() == LibItemObj.GetLibName():
# ItemSupArchList = Item.GetSupArchList()
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py b/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py
index 4ed739d66f..c75e7d7a3e 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file miscellaneous.
-# Include BootMode/HOB/Event and others. It will consumed by InfParser.
+# This file is used to define class objects of INF file miscellaneous.
+# Include BootMode/HOB/Event and others. It will consumed by InfParser.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -23,7 +23,7 @@ from Library import DataType as DT
from Object.Parser.InfCommonObject import InfSectionCommonDef
from Library.Misc import Sdict
-##
+##
# BootModeObject
#
class InfBootModeObject():
@@ -31,16 +31,16 @@ class InfBootModeObject():
self.SupportedBootModes = ''
self.HelpString = ''
self.Usage = ''
-
+
def SetSupportedBootModes(self, SupportedBootModes):
- self.SupportedBootModes = SupportedBootModes
+ self.SupportedBootModes = SupportedBootModes
def GetSupportedBootModes(self):
return self.SupportedBootModes
def SetHelpString(self, HelpString):
self.HelpString = HelpString
def GetHelpString(self):
- return self.HelpString
+ return self.HelpString
def SetUsage(self, Usage):
self.Usage = Usage
@@ -54,18 +54,18 @@ class InfEventObject():
self.EventType = ''
self.HelpString = ''
self.Usage = ''
-
+
def SetEventType(self, EventType):
self.EventType = EventType
-
+
def GetEventType(self):
return self.EventType
-
+
def SetHelpString(self, HelpString):
self.HelpString = HelpString
def GetHelpString(self):
- return self.HelpString
-
+ return self.HelpString
+
def SetUsage(self, Usage):
self.Usage = Usage
def GetUsage(self):
@@ -79,36 +79,36 @@ class InfHobObject():
self.Usage = ''
self.SupArchList = []
self.HelpString = ''
-
+
def SetHobType(self, HobType):
self.HobType = HobType
-
+
def GetHobType(self):
return self.HobType
-
+
def SetUsage(self, Usage):
self.Usage = Usage
def GetUsage(self):
return self.Usage
-
+
def SetSupArchList(self, ArchList):
self.SupArchList = ArchList
def GetSupArchList(self):
return self.SupArchList
-
+
def SetHelpString(self, HelpString):
self.HelpString = HelpString
def GetHelpString(self):
return self.HelpString
-
+
##
# InfSpecialCommentObject
-#
+#
class InfSpecialCommentObject(InfSectionCommonDef):
def __init__(self):
self.SpecialComments = Sdict()
InfSectionCommonDef.__init__(self)
-
+
def SetSpecialComments(self, SepcialSectionList = None, Type = ''):
if Type == DT.TYPE_HOB_SECTION or \
Type == DT.TYPE_EVENT_SECTION or \
@@ -122,27 +122,27 @@ class InfSpecialCommentObject(InfSectionCommonDef):
ObjList = []
ObjList.append(Item)
self.SpecialComments[Type] = ObjList
-
+
return True
-
+
def GetSpecialComments(self):
return self.SpecialComments
## ErrorInInf
-#
+#
# An encapsulate of Error for INF parser.
-#
+#
def ErrorInInf(Message=None, ErrorCode=None, LineInfo=None, RaiseError=True):
if ErrorCode is None:
ErrorCode = ToolError.FORMAT_INVALID
if LineInfo is None:
LineInfo = ['', -1, '']
- Logger.Error("InfParser",
- ErrorCode,
- Message=Message,
- File=LineInfo[0],
+ Logger.Error("InfParser",
+ ErrorCode,
+ Message=Message,
+ File=LineInfo[0],
Line=LineInfo[1],
- ExtraData=LineInfo[2],
+ ExtraData=LineInfo[2],
RaiseError=RaiseError)
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py
index bfac2b6b57..b2ca573549 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file [Packages] section.
-# It will consumed by InfParser.
+# This file is used to define class objects of INF file [Packages] section.
+# It will consumed by InfParser.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -19,14 +19,14 @@ InfPackageObject
from Logger import StringTable as ST
from Logger import ToolError
import Logger.Log as Logger
-from Library import GlobalData
+from Library import GlobalData
from Library.Misc import Sdict
from Library.ParserValidate import IsValidPath
-from Library.ExpressionValidate import IsValidFeatureFlagExp
+from Library.ExpressionValidate import IsValidFeatureFlagExp
class InfPackageItem():
- def __init__(self,
+ def __init__(self,
PackageName = '',
FeatureFlagExp = '',
HelpString = ''):
@@ -34,28 +34,28 @@ class InfPackageItem():
self.FeatureFlagExp = FeatureFlagExp
self.HelpString = HelpString
self.SupArchList = []
-
+
def SetPackageName(self, PackageName):
self.PackageName = PackageName
def GetPackageName(self):
return self.PackageName
-
+
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
-
+
def SetHelpString(self, HelpString):
self.HelpString = HelpString
def GetHelpString(self):
return self.HelpString
-
+
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
-
+
## INF package section
#
#
@@ -67,18 +67,18 @@ class InfPackageObject():
# Macro defined in this section should be only used in this section.
#
self.Macros = {}
-
+
def SetPackages(self, PackageData, Arch = None):
IsValidFileFlag = False
SupArchList = []
for ArchItem in Arch:
#
# Validate Arch
- #
+ #
if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
- SupArchList.append(ArchItem)
-
+ SupArchList.append(ArchItem)
+
for PackageItem in PackageData:
PackageItemObj = InfPackageItem()
HelpStringObj = PackageItem[1]
@@ -86,7 +86,7 @@ class InfPackageObject():
PackageItem = PackageItem[0]
if HelpStringObj is not None:
HelpString = HelpStringObj.HeaderComments + HelpStringObj.TailComments
- PackageItemObj.SetHelpString(HelpString)
+ PackageItemObj.SetHelpString(HelpString)
if len(PackageItem) >= 1:
#
# Validate file exist/format.
@@ -94,67 +94,67 @@ class InfPackageObject():
if IsValidPath(PackageItem[0], ''):
IsValidFileFlag = True
elif IsValidPath(PackageItem[0], GlobalData.gINF_MODULE_DIR):
- IsValidFileFlag = True
+ IsValidFileFlag = True
elif IsValidPath(PackageItem[0], GlobalData.gWORKSPACE):
IsValidFileFlag = True
else:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(PackageItem[0]),
- File=CurrentLineOfPackItem[2],
- Line=CurrentLineOfPackItem[1],
+ File=CurrentLineOfPackItem[2],
+ Line=CurrentLineOfPackItem[1],
ExtraData=CurrentLineOfPackItem[0])
return False
- if IsValidFileFlag:
+ if IsValidFileFlag:
PackageItemObj.SetPackageName(PackageItem[0])
if len(PackageItem) == 2:
#
# Validate Feature Flag Express
#
if PackageItem[1].strip() == '':
- Logger.Error("InfParser",
- ToolError.FORMAT_INVALID,
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
- File=CurrentLineOfPackItem[2],
- Line=CurrentLineOfPackItem[1],
+ File=CurrentLineOfPackItem[2],
+ Line=CurrentLineOfPackItem[1],
ExtraData=CurrentLineOfPackItem[0])
#
- # Validate FFE
+ # Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(PackageItem[1].strip())
if not FeatureFlagRtv[0]:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
- File=CurrentLineOfPackItem[2],
- Line=CurrentLineOfPackItem[1],
+ File=CurrentLineOfPackItem[2],
+ Line=CurrentLineOfPackItem[1],
ExtraData=CurrentLineOfPackItem[0])
-
+
PackageItemObj.SetFeatureFlagExp(PackageItem[1].strip())
-
+
if len(PackageItem) > 2:
#
- # Invalid format of Package statement
+ # Invalid format of Package statement
#
- Logger.Error("InfParser",
- ToolError.FORMAT_INVALID,
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_PACKAGE_SECTION_CONTENT_ERROR,
- File=CurrentLineOfPackItem[2],
- Line=CurrentLineOfPackItem[1],
+ File=CurrentLineOfPackItem[2],
+ Line=CurrentLineOfPackItem[1],
ExtraData=CurrentLineOfPackItem[0])
PackageItemObj.SetSupArchList(SupArchList)
-
+
#
# Determine package file name duplicate. Follow below rule:
#
- # A package filename must not be duplicated within a [Packages]
- # section. Package filenames may appear in multiple architectural
- # [Packages] sections. A package filename listed in an
+ # A package filename must not be duplicated within a [Packages]
+ # section. Package filenames may appear in multiple architectural
+ # [Packages] sections. A package filename listed in an
# architectural [Packages] section must not be listed in the common
# architectural [Packages] section.
- #
+ #
# NOTE: This check will not report error now.
- #
+ #
for Item in self.Packages:
if Item.GetPackageName() == PackageItemObj.GetPackageName():
ItemSupArchList = Item.GetSupArchList()
@@ -170,7 +170,7 @@ class InfPackageObject():
# ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
#
pass
-
+
if (PackageItemObj) in self.Packages:
PackageList = self.Packages[PackageItemObj]
PackageList.append(PackageItemObj)
@@ -179,9 +179,9 @@ class InfPackageObject():
PackageList = []
PackageList.append(PackageItemObj)
self.Packages[PackageItemObj] = PackageList
-
+
return True
-
+
def GetPackages(self, Arch = None):
if Arch is None:
return self.Packages
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py
index 3b9dfaed0c..b812d3253c 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file [Pcds] section.
-# It will consumed by InfParser.
+# This file is used to define class objects of INF file [Pcds] section.
+# It will consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -42,7 +42,7 @@ from Object.Parser.InfPackagesObject import InfPackageItem
def ValidateArch(ArchItem, PcdTypeItem1, LineNo, SupArchDict, SupArchList):
#
# Validate Arch
- #
+ #
if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
@@ -122,7 +122,7 @@ def ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj):
BlockFlag = 4
#
# Combine two comment line if they are generic comment
- #
+ #
if CommentItemUsage == PreUsage == DT.ITEM_UNDEFINED:
CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
@@ -141,7 +141,7 @@ def ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj):
elif BlockFlag == 3:
#
# Add previous help string
- #
+ #
CommentItemIns = InfPcdItemCommentContent()
CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE):
@@ -171,7 +171,7 @@ def ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj):
class InfPcdItemCommentContent():
def __init__(self):
#
- # ## SOMETIMES_CONSUMES ## HelpString
+ # ## SOMETIMES_CONSUMES ## HelpString
#
self.UsageItem = ''
#
@@ -195,7 +195,7 @@ class InfPcdItemCommentContent():
#
# @param CName: Input value for CName, default is ''
# @param Token: Input value for Token, default is ''
-# @param TokenSpaceGuidCName: Input value for TokenSpaceGuidCName, default
+# @param TokenSpaceGuidCName: Input value for TokenSpaceGuidCName, default
# is ''
# @param DatumType: Input value for DatumType, default is ''
# @param MaxDatumSize: Input value for MaxDatumSize, default is ''
@@ -385,7 +385,7 @@ class InfPcdObject():
Line=CurrentLineOfPcdItem[1],
ExtraData=CurrentLineOfPcdItem[0])
#
- # Validate FFE
+ # Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(PcdItem[2].strip())
if not FeatureFlagRtv[0]:
@@ -478,7 +478,7 @@ def ParserPcdInfoInDec(String):
def SetValueDatumTypeMaxSizeToken(PcdItem, CurrentLineOfPcdItem, PcdItemObj, Arch, PackageInfo=None):
#
- # Package information not been generated currently, we need to parser INF file to get information.
+ # Package information not been generated currently, we need to parser INF file to get information.
#
if not PackageInfo:
PackageInfo = []
@@ -507,7 +507,7 @@ def SetValueDatumTypeMaxSizeToken(PcdItem, CurrentLineOfPcdItem, PcdItemObj, Arc
DecParser = GlobalData.gPackageDict[FullFileName]
#
- # Find PCD information.
+ # Find PCD information.
#
DecPcdsDict = DecParser.GetPcdSectionObject().ValueDict
for Key in DecPcdsDict.keys():
@@ -640,7 +640,7 @@ def ValidatePcdValueOnDatumType(Value, Type):
def SetPcdName(PcdItem, CurrentLineOfPcdItem, PcdItemObj):
#
- # Only PCD Name specified
+ # Only PCD Name specified
# <PcdName> ::= <TokenSpaceGuidCName> "." <TokenCName>
#
PcdId = GetSplitValueList(PcdItem[0], DT.TAB_SPLIT)
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py
index 0f865c5696..b871a5088a 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file [Ppis] section.
-# It will consumed by InfParser.
+# This file is used to define class objects of INF file [Ppis] section.
+# It will consumed by InfParser.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -18,17 +18,17 @@ InfPpiObject
from Library.ParserValidate import IsValidCVariableName
from Library.CommentParsing import ParseComment
-from Library.ExpressionValidate import IsValidFeatureFlagExp
-
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+
from Library.Misc import Sdict
-from Library import DataType as DT
+from Library import DataType as DT
import Logger.Log as Logger
from Logger import ToolError
from Logger import StringTable as ST
def ParsePpiComment(CommentsList, InfPpiItemObj):
PreNotify = None
- PreUsage = None
+ PreUsage = None
PreHelpText = ''
BlockFlag = -1
CommentInsList = []
@@ -39,22 +39,22 @@ def ParsePpiComment(CommentsList, InfPpiItemObj):
CommentItemNotify, \
CommentItemString, \
CommentItemHelpText = \
- ParseComment(CommentItem,
- DT.ALL_USAGE_TOKENS,
- DT.PPI_NOTIFY_TOKENS,
- ['PPI'],
+ ParseComment(CommentItem,
+ DT.ALL_USAGE_TOKENS,
+ DT.PPI_NOTIFY_TOKENS,
+ ['PPI'],
False)
-
+
#
- # To avoid PyLint error
+ # To avoid PyLint error
#
if CommentItemString:
pass
-
+
if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentsList) and CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
- CommentItemHelpText = DT.END_OF_LINE
+ CommentItemHelpText = DT.END_OF_LINE
#
# For the Last comment Item, set BlockFlag.
#
@@ -65,12 +65,12 @@ def ParsePpiComment(CommentsList, InfPpiItemObj):
else:
BlockFlag = 3
elif BlockFlag == -1:
- BlockFlag = 4
-
+ BlockFlag = 4
+
#
# Comment USAGE and NOTIFY information are "UNDEFINED"
#
- if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
+ if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
if BlockFlag == -1:
BlockFlag = 1
@@ -81,41 +81,41 @@ def ParsePpiComment(CommentsList, InfPpiItemObj):
BlockFlag = 3
#
# An item have Usage or Notify information and the first time get this information
- #
+ #
elif BlockFlag == -1:
BlockFlag = 4
-
+
#
# Combine two comment line if they are generic comment
- #
+ #
if CommentItemUsage == CommentItemNotify == PreUsage == PreNotify == DT.ITEM_UNDEFINED:
CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
#
# Store this information for next line may still need combine operation.
#
PreHelpText = CommentItemHelpText
-
- if BlockFlag == 4:
+
+ if BlockFlag == 4:
CommentItemIns = InfPpiItemCommentContent()
CommentItemIns.SetUsage(CommentItemUsage)
CommentItemIns.SetNotify(CommentItemNotify)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
-
+
BlockFlag = -1
PreUsage = None
PreNotify = None
PreHelpText = ''
-
+
elif BlockFlag == 3:
#
# Add previous help string
- #
+ #
CommentItemIns = InfPpiItemCommentContent()
CommentItemIns.SetUsage(DT.ITEM_UNDEFINED)
CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE):
- PreHelpText += DT.END_OF_LINE
+ PreHelpText += DT.END_OF_LINE
CommentItemIns.SetHelpStringItem(PreHelpText)
CommentInsList.append(CommentItemIns)
#
@@ -126,7 +126,7 @@ def ParsePpiComment(CommentsList, InfPpiItemObj):
CommentItemIns.SetNotify(CommentItemNotify)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
-
+
BlockFlag = -1
PreUsage = None
PreNotify = None
@@ -135,15 +135,15 @@ def ParsePpiComment(CommentsList, InfPpiItemObj):
PreUsage = CommentItemUsage
PreNotify = CommentItemNotify
PreHelpText = CommentItemHelpText
-
+
InfPpiItemObj.SetCommentList(CommentInsList)
-
+
return InfPpiItemObj
class InfPpiItemCommentContent():
def __init__(self):
#
- # ## SOMETIMES_CONSUMES ## HelpString
+ # ## SOMETIMES_CONSUMES ## HelpString
#
self.UsageItem = ''
#
@@ -152,30 +152,30 @@ class InfPpiItemCommentContent():
self.HelpStringItem = ''
self.Notify = ''
self.CommentList = []
-
+
def SetUsage(self, UsageItem):
self.UsageItem = UsageItem
def GetUsage(self):
return self.UsageItem
-
+
def SetNotify(self, Notify):
if Notify != DT.ITEM_UNDEFINED:
self.Notify = 'true'
def GetNotify(self):
return self.Notify
-
+
def SetHelpStringItem(self, HelpStringItem):
self.HelpStringItem = HelpStringItem
def GetHelpStringItem(self):
return self.HelpStringItem
-
+
class InfPpiItem():
def __init__(self):
self.Name = ''
- self.FeatureFlagExp = ''
+ self.FeatureFlagExp = ''
self.SupArchList = []
self.CommentList = []
-
+
def SetName(self, Name):
self.Name = Name
def GetName(self):
@@ -184,7 +184,7 @@ class InfPpiItem():
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
- return self.SupArchList
+ return self.SupArchList
def SetCommentList(self, CommentList):
self.CommentList = CommentList
@@ -206,17 +206,17 @@ class InfPpiObject():
# Macro defined in this section should be only used in this section.
#
self.Macros = {}
-
+
def SetPpi(self, PpiList, Arch = None):
__SupArchList = []
for ArchItem in Arch:
#
# Validate Arch
- #
+ #
if (ArchItem == '' or ArchItem is None):
- ArchItem = 'COMMON'
+ ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
-
+
for Item in PpiList:
#
# Get Comment content of this protocol
@@ -226,26 +226,26 @@ class InfPpiObject():
CommentsList = Item[1]
CurrentLineOfItem = Item[2]
Item = Item[0]
- InfPpiItemObj = InfPpiItem()
+ InfPpiItemObj = InfPpiItem()
if len(Item) >= 1 and len(Item) <= 2:
#
# Only CName contained
#
if not IsValidCVariableName(Item[0]):
- Logger.Error("InfParser",
- ToolError.FORMAT_INVALID,
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_INVALID_CNAME%(Item[0]),
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
if (Item[0] != ''):
InfPpiItemObj.SetName(Item[0])
else:
- Logger.Error("InfParser",
- ToolError.FORMAT_INVALID,
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_CNAME_MISSING,
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
#
# Have FeatureFlag information
@@ -254,14 +254,14 @@ class InfPpiObject():
#
# Contained CName and Feature Flag Express
# <statements> ::= <CName> ["|" <FeatureFlagExpress>]
- # Item[1] should not be empty
+ # Item[1] should not be empty
#
if Item[1].strip() == '':
- Logger.Error("InfParser",
- ToolError.FORMAT_INVALID,
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
#
# Validate Feature Flag Express for PPI entry
@@ -269,24 +269,24 @@ class InfPpiObject():
#
FeatureFlagRtv = IsValidFeatureFlagExp(Item[1].strip())
if not FeatureFlagRtv[0]:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
InfPpiItemObj.SetFeatureFlagExp(Item[1])
if len(Item) != 1 and len(Item) != 2:
#
- # Invalid format of Ppi statement
+ # Invalid format of Ppi statement
#
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR,
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
-
+
#
# Get/Set Usage and HelpString for PPI entry
#
@@ -297,20 +297,20 @@ class InfPpiObject():
CommentItemIns.SetUsage(DT.ITEM_UNDEFINED)
CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
InfPpiItemObj.SetCommentList([CommentItemIns])
-
+
InfPpiItemObj.SetSupArchList(__SupArchList)
#
# Determine PPI name duplicate. Follow below rule:
#
- # A PPI must not be duplicated within a [Ppis] section.
- # A PPI may appear in multiple architectural [Ppis]
- # sections. A PPI listed in an architectural [Ppis]
- # section must not be listed in the common architectural
+ # A PPI must not be duplicated within a [Ppis] section.
+ # A PPI may appear in multiple architectural [Ppis]
+ # sections. A PPI listed in an architectural [Ppis]
+ # section must not be listed in the common architectural
# [Ppis] section.
- #
+ #
# NOTE: This check will not report error now.
- #
+ #
for Item in self.Ppis:
if Item.GetName() == InfPpiItemObj.GetName():
ItemSupArchList = Item.GetSupArchList()
@@ -324,9 +324,9 @@ class InfPpiObject():
if ItemArch.upper() == 'COMMON' or PpiItemObjArch.upper() == 'COMMON':
#
# ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
- #
+ #
pass
-
+
if (InfPpiItemObj) in self.Ppis:
PpiList = self.Ppis[InfPpiItemObj]
PpiList.append(InfPpiItemObj)
@@ -335,9 +335,9 @@ class InfPpiObject():
PpiList = []
PpiList.append(InfPpiItemObj)
self.Ppis[InfPpiItemObj] = PpiList
-
- return True
-
-
+
+ return True
+
+
def GetPpi(self):
return self.Ppis
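
The InfPpiObject.py hunks above touch ParsePpiComment(), which walks the ## comment lines attached to a [Ppis] entry and merges consecutive generic lines into one help string while usage/notify lines start their own record. A much-simplified standalone sketch of that grouping idea follows; parse_usage() is an illustrative stand-in for ParseComment(), and none of the UPT library types are used.

UNDEFINED = 'UNDEFINED'

def parse_usage(comment):
    # Hypothetical stand-in for ParseComment(): returns (usage, help_text).
    tokens = ('SOMETIMES_PRODUCES', 'SOMETIMES_CONSUMES',
              'PRODUCES', 'CONSUMES', 'NOTIFY')
    stripped = comment.lstrip('#').strip()
    for token in tokens:
        if stripped.startswith(token):
            return token, stripped[len(token):].lstrip('# ').strip()
    return UNDEFINED, stripped

def group_comments(comment_lines):
    records = []        # list of (usage, help_text)
    pending_help = []   # generic lines waiting to be merged

    def flush():
        if pending_help:
            records.append((UNDEFINED, '\n'.join(pending_help)))
            del pending_help[:]

    for line in comment_lines:
        usage, help_text = parse_usage(line)
        if usage == UNDEFINED:
            pending_help.append(help_text)
        else:
            flush()
            records.append((usage, help_text))
    flush()
    return records

if __name__ == '__main__':
    print(group_comments(['# PPI that may be installed',
                          '# by this module',
                          '## SOMETIMES_PRODUCES ## Report status']))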
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py
index 6cadeb5a21..819d2ff3a7 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file [Protocols] section.
-# It will consumed by InfParser.
+# This file is used to define class objects of INF file [Protocols] section.
+# It will consumed by InfParser.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -18,8 +18,8 @@ InfProtocolObject
from Library.ParserValidate import IsValidCVariableName
from Library.CommentParsing import ParseComment
-from Library.ExpressionValidate import IsValidFeatureFlagExp
-
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+
from Library.Misc import Sdict
from Object.Parser.InfMisc import ErrorInInf
@@ -40,20 +40,20 @@ def ParseProtocolComment(CommentsList, InfProtocolItemObj):
CommentItemNotify, \
CommentItemString, \
CommentItemHelpText = \
- ParseComment(CommentItem,
- DT.PROTOCOL_USAGE_TOKENS,
- DT.PROTOCOL_NOTIFY_TOKENS,
- ['PROTOCOL'],
+ ParseComment(CommentItem,
+ DT.PROTOCOL_USAGE_TOKENS,
+ DT.PROTOCOL_NOTIFY_TOKENS,
+ ['PROTOCOL'],
False)
-
+
if CommentItemString:
pass
-
+
if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentsList) and CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
-
+
if Count == len(CommentsList):
if BlockFlag == 1 or BlockFlag == 2:
if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
@@ -61,9 +61,9 @@ def ParseProtocolComment(CommentsList, InfProtocolItemObj):
else:
BlockFlag = 3
elif BlockFlag == -1:
- BlockFlag = 4
-
- if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
+ BlockFlag = 4
+
+ if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
if BlockFlag == -1:
BlockFlag = 1
@@ -74,36 +74,36 @@ def ParseProtocolComment(CommentsList, InfProtocolItemObj):
BlockFlag = 3
elif BlockFlag == -1:
BlockFlag = 4
-
+
#
# Combine two comment line if they are generic comment
- #
+ #
if CommentItemUsage == CommentItemNotify == PreUsage == PreNotify == DT.ITEM_UNDEFINED:
CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
-
+
PreHelpText = CommentItemHelpText
-
- if BlockFlag == 4:
+
+ if BlockFlag == 4:
CommentItemIns = InfProtocolItemCommentContent()
CommentItemIns.SetUsageItem(CommentItemUsage)
CommentItemIns.SetNotify(CommentItemNotify)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
-
+
BlockFlag = -1
PreUsage = None
PreNotify = None
PreHelpText = ''
-
+
elif BlockFlag == 3:
#
# Add previous help string
- #
+ #
CommentItemIns = InfProtocolItemCommentContent()
CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE):
- PreHelpText += DT.END_OF_LINE
+ PreHelpText += DT.END_OF_LINE
CommentItemIns.SetHelpStringItem(PreHelpText)
CommentInsList.append(CommentItemIns)
#
@@ -114,25 +114,25 @@ def ParseProtocolComment(CommentsList, InfProtocolItemObj):
CommentItemIns.SetNotify(CommentItemNotify)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
-
+
BlockFlag = -1
PreUsage = None
PreNotify = None
- PreHelpText = ''
-
+ PreHelpText = ''
+
else:
PreUsage = CommentItemUsage
PreNotify = CommentItemNotify
PreHelpText = CommentItemHelpText
-
+
InfProtocolItemObj.SetCommentList(CommentInsList)
-
+
return InfProtocolItemObj
class InfProtocolItemCommentContent():
def __init__(self):
#
- # ## SOMETIMES_CONSUMES ## HelpString
+ # ## SOMETIMES_CONSUMES ## HelpString
#
self.UsageItem = ''
#
@@ -141,44 +141,44 @@ class InfProtocolItemCommentContent():
self.HelpStringItem = ''
self.Notify = ''
self.CommentList = []
-
+
def SetUsageItem(self, UsageItem):
self.UsageItem = UsageItem
def GetUsageItem(self):
return self.UsageItem
-
+
def SetNotify(self, Notify):
if Notify != DT.ITEM_UNDEFINED:
self.Notify = 'true'
def GetNotify(self):
return self.Notify
-
+
def SetHelpStringItem(self, HelpStringItem):
self.HelpStringItem = HelpStringItem
def GetHelpStringItem(self):
return self.HelpStringItem
-
+
class InfProtocolItem():
def __init__(self):
self.Name = ''
self.FeatureFlagExp = ''
self.SupArchList = []
self.CommentList = []
-
+
def SetName(self, Name):
self.Name = Name
def GetName(self):
return self.Name
-
+
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
-
+
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
- return self.SupArchList
+ return self.SupArchList
def SetCommentList(self, CommentList):
self.CommentList = CommentList
@@ -196,13 +196,13 @@ class InfProtocolObject():
# Macro defined in this section should be only used in this section.
#
self.Macros = {}
-
+
def SetProtocol(self, ProtocolContent, Arch = None,):
__SupArchList = []
for ArchItem in Arch:
#
# Validate Arch
- #
+ #
if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
@@ -233,7 +233,7 @@ class InfProtocolObject():
if len(Item) == 2:
#
# Contained CName and Feature Flag Express
- # <statements> ::= <CName> ["|"
+ # <statements> ::= <CName> ["|"
# <FeatureFlagExpress>]
# For Protocol Object
#
@@ -248,14 +248,14 @@ class InfProtocolObject():
ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
LineInfo=LineInfo)
InfProtocolItemObj.SetFeatureFlagExp(Item[1])
-
+
if len(Item) < 1 or len(Item) > 2:
#
- # Invalid format of Protocols statement
+ # Invalid format of Protocols statement
#
ErrorInInf(ST.ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR,
LineInfo=LineInfo)
-
+
#
# Get/Set Usage and HelpString for Protocol entry
#
@@ -266,20 +266,20 @@ class InfProtocolObject():
CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
InfProtocolItemObj.SetCommentList([CommentItemIns])
-
+
InfProtocolItemObj.SetSupArchList(__SupArchList)
-
+
#
# Determine protocol name duplicate. Follow below rule:
#
- # A protocol must not be duplicated within a [Protocols] section.
- # A protocol may appear in multiple architectural [Protocols]
- # sections. A protocol listed in an architectural [Protocols]
- # section must not be listed in the common architectural
+ # A protocol must not be duplicated within a [Protocols] section.
+ # A protocol may appear in multiple architectural [Protocols]
+ # sections. A protocol listed in an architectural [Protocols]
+ # section must not be listed in the common architectural
# [Protocols] section.
- #
+ #
# NOTE: This check will not report error now.
- #
+ #
for Item in self.Protocols:
if Item.GetName() == InfProtocolItemObj.GetName():
ItemSupArchList = Item.GetSupArchList()
@@ -294,8 +294,8 @@ class InfProtocolObject():
#
# ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
#
- pass
-
+ pass
+
if (InfProtocolItemObj) in self.Protocols:
ProcotolList = self.Protocols[InfProtocolItemObj]
ProcotolList.append(InfProtocolItemObj)
@@ -304,8 +304,8 @@ class InfProtocolObject():
ProcotolList = []
ProcotolList.append(InfProtocolItemObj)
self.Protocols[InfProtocolItemObj] = ProcotolList
-
+
return True
-
+
def GetProtocol(self):
return self.Protocols
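
The [Protocols] and [Ppis] statements validated above share the form <CName> ["|" <FeatureFlagExpress>]. The sketch below shows that split and the two checks in isolation; validate_c_name() and validate_feature_flag() are simplified stand-ins for IsValidCVariableName() and IsValidFeatureFlagExp(), and the GUID name in the example is illustrative.

import re

def validate_c_name(name):
    return re.match(r'^[A-Za-z_][A-Za-z0-9_]*$', name) is not None

def validate_feature_flag(expr):
    # The real check parses the expression grammar; requiring a non-empty
    # expression is enough for this illustration.
    return bool(expr.strip())

def parse_protocol_statement(line):
    parts = [part.strip() for part in line.split('|')]
    if not 1 <= len(parts) <= 2:
        raise ValueError('invalid [Protocols] statement: %s' % line)
    if not validate_c_name(parts[0]):
        raise ValueError('invalid CName: %s' % parts[0])
    feature_flag = None
    if len(parts) == 2:
        if not validate_feature_flag(parts[1]):
            raise ValueError('missing feature flag expression')
        feature_flag = parts[1]
    return parts[0], feature_flag

if __name__ == '__main__':
    print(parse_protocol_statement(
        'gExampleProtocolGuid | FixedPcdGetBool(PcdExampleFeature)'))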
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py
index 285e89aacb..3eefde70a5 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file [Sources] section.
-# It will consumed by InfParser.
+# This file is used to define class objects of INF file [Sources] section.
+# It will consumed by InfParser.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -21,22 +21,22 @@ import os
from Logger import StringTable as ST
from Logger import ToolError
import Logger.Log as Logger
-from Library import GlobalData
+from Library import GlobalData
from Library.Misc import Sdict
-from Library.ExpressionValidate import IsValidFeatureFlagExp
+from Library.ExpressionValidate import IsValidFeatureFlagExp
from Object.Parser.InfCommonObject import InfSectionCommonDef
-from Library.Misc import ValidFile
+from Library.Misc import ValidFile
from Library.ParserValidate import IsValidFamily
from Library.ParserValidate import IsValidPath
-## __GenSourceInstance
+## __GenSourceInstance
+#
#
-#
def GenSourceInstance(Item, CurrentLineOfItem, ItemObj):
-
+
IsValidFileFlag = False
-
+
if len(Item) < 6 and len(Item) >= 1:
#
# File | Family | TagName | ToolCode | FeatureFlagExpr
@@ -46,43 +46,43 @@ def GenSourceInstance(Item, CurrentLineOfItem, ItemObj):
# Validate Feature Flag Express
#
if Item[4].strip() == '':
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
#
- # Validate FFE
+ # Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(Item[4].strip())
if not FeatureFlagRtv[0]:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
ItemObj.SetFeatureFlagExp(Item[4])
if len(Item) >= 4:
if Item[3].strip() == '':
ItemObj.SetToolCode(Item[3])
else:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_TOOLCODE_NOT_PERMITTED%(Item[2]),
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
- ExtraData=CurrentLineOfItem[0])
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
if len(Item) >= 3:
if Item[2].strip() == '':
ItemObj.SetTagName(Item[2])
else:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_TAGNAME_NOT_PERMITTED%(Item[2]),
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
if len(Item) >= 2:
if IsValidFamily(Item[1].strip()):
@@ -93,11 +93,11 @@ def GenSourceInstance(Item, CurrentLineOfItem, ItemObj):
Item[1] = ""
ItemObj.SetFamily(Item[1])
else:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_SOURCE_SECTION_FAMILY_INVALID%(Item[1]),
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
if len(Item) >= 1:
#
@@ -105,41 +105,41 @@ def GenSourceInstance(Item, CurrentLineOfItem, ItemObj):
#
FullFileName = os.path.normpath(os.path.realpath(os.path.join(GlobalData.gINF_MODULE_DIR, Item[0])))
if not (ValidFile(FullFileName) or ValidFile(Item[0])):
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_FILELIST_EXIST%(Item[0]),
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
-
+
#
# Validate file exist/format.
#
-
+
if IsValidPath(Item[0], GlobalData.gINF_MODULE_DIR):
IsValidFileFlag = True
else:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(Item[0]),
- File=CurrentLineOfItem[2],
- Line=CurrentLineOfItem[1],
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
return False
if IsValidFileFlag:
- ItemObj.SetSourceFileName(Item[0])
+ ItemObj.SetSourceFileName(Item[0])
else:
- Logger.Error("InfParser",
- ToolError.FORMAT_INVALID,
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_SOURCES_SECTION_CONTENT_ERROR,
- File=CurrentLineOfItem[2],
+ File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
-
- return ItemObj
+
+ return ItemObj
## InfSourcesItemObject()
-#
+#
#
class InfSourcesItemObject():
def __init__(self, \
@@ -156,32 +156,32 @@ class InfSourcesItemObject():
self.HeaderString = ''
self.TailString = ''
self.SupArchList = []
-
+
def SetSourceFileName(self, SourceFilename):
self.SourceFileName = SourceFilename
def GetSourceFileName(self):
return self.SourceFileName
-
+
def SetFamily(self, Family):
self.Family = Family
def GetFamily(self):
return self.Family
-
+
def SetTagName(self, TagName):
self.TagName = TagName
def GetTagName(self):
return self.TagName
-
+
def SetToolCode(self, ToolCode):
self.ToolCode = ToolCode
def GetToolCode(self):
return self.ToolCode
-
+
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
-
+
def SetHeaderString(self, HeaderString):
self.HeaderString = HeaderString
def GetHeaderString(self):
@@ -191,11 +191,11 @@ class InfSourcesItemObject():
self.TailString = TailString
def GetTailString(self):
return self.TailString
-
+
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
- return self.SupArchList
+ return self.SupArchList
##
#
#
@@ -204,26 +204,26 @@ class InfSourcesObject(InfSectionCommonDef):
def __init__(self):
self.Sources = Sdict()
InfSectionCommonDef.__init__(self)
-
+
def SetSources(self, SourceList, Arch = None):
__SupArchList = []
for ArchItem in Arch:
#
# Validate Arch
- #
+ #
if (ArchItem == '' or ArchItem is None):
- ArchItem = 'COMMON'
- __SupArchList.append(ArchItem)
+ ArchItem = 'COMMON'
+ __SupArchList.append(ArchItem)
for Item in SourceList:
ItemObj = InfSourcesItemObject()
CurrentLineOfItem = Item[2]
- Item = Item[0]
-
+ Item = Item[0]
+
ItemObj = GenSourceInstance(Item, CurrentLineOfItem, ItemObj)
-
- ItemObj.SetSupArchList(__SupArchList)
-
+
+ ItemObj.SetSupArchList(__SupArchList)
+
if (ItemObj) in self.Sources:
SourceContent = self.Sources[ItemObj]
SourceContent.append(ItemObj)
@@ -232,8 +232,8 @@ class InfSourcesObject(InfSectionCommonDef):
SourceContent = []
SourceContent.append(ItemObj)
self.Sources[ItemObj] = SourceContent
-
+
return True
-
+
def GetSources(self):
return self.Sources
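
GenSourceInstance() above checks a [Sources] entry positionally: File | Family | TagName | ToolCode | FeatureFlagExpr, with at most five fields. A minimal standalone sketch of that split is shown here; the field names are only labels, and none of the path, family, or feature-flag validation from the UPT libraries is reproduced.

def split_source_entry(line):
    fields = ['SourceFileName', 'Family', 'TagName', 'ToolCode', 'FeatureFlagExp']
    parts = [part.strip() for part in line.split('|')]
    if not 1 <= len(parts) <= 5:
        raise ValueError('invalid [Sources] entry: %s' % line)
    entry = dict.fromkeys(fields, '')
    for name, value in zip(fields, parts):
        entry[name] = value
    return entry

if __name__ == '__main__':
    print(split_source_entry(
        'Example.c | MSFT | | | FixedPcdGetBool(PcdExampleFeature)'))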
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfUserExtensionObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfUserExtensionObject.py
index f9db2944a4..d83fd5321a 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfUserExtensionObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfUserExtensionObject.py
@@ -1,12 +1,12 @@
## @file
-# This file is used to define class objects of INF file [UserExtension] section.
-# It will consumed by InfParser.
+# This file is used to define class objects of INF file [UserExtension] section.
+# It will consumed by InfParser.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -19,7 +19,7 @@ InfUserExtensionsObject
from Logger import StringTable as ST
from Logger import ToolError
import Logger.Log as Logger
-from Library import GlobalData
+from Library import GlobalData
from Library.Misc import Sdict
@@ -32,27 +32,27 @@ class InfUserExtensionItem():
self.UserId = UserId
self.IdString = IdString
self.SupArchList = []
-
+
def SetContent(self, Content):
self.Content = Content
def GetContent(self):
return self.Content
-
+
def SetUserId(self, UserId):
self.UserId = UserId
def GetUserId(self):
return self.UserId
-
+
def SetIdString(self, IdString):
self.IdString = IdString
def GetIdString(self):
return self.IdString
-
+
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
-
+
##
#
#
@@ -60,74 +60,74 @@ class InfUserExtensionItem():
class InfUserExtensionObject():
def __init__(self):
self.UserExtension = Sdict()
-
+
def SetUserExtension(self, UserExtensionCont, IdContent=None, LineNo=None):
if not UserExtensionCont or UserExtensionCont == '':
return True
#
- # IdContent is a list contain UserId and IdString
+ # IdContent is a list contain UserId and IdString
# For this call the general section header parser, if no definition of
# IdString/UserId, it will return 'COMMON'
#
- for IdContentItem in IdContent:
+ for IdContentItem in IdContent:
InfUserExtensionItemObj = InfUserExtensionItem()
if IdContentItem[0] == 'COMMON':
UserId = ''
else:
UserId = IdContentItem[0]
-
+
if IdContentItem[1] == 'COMMON':
IdString = ''
else:
- IdString = IdContentItem[1]
-
+ IdString = IdContentItem[1]
+
#
# Fill UserExtensionObj members.
- #
+ #
InfUserExtensionItemObj.SetUserId(UserId)
InfUserExtensionItemObj.SetIdString(IdString)
InfUserExtensionItemObj.SetContent(UserExtensionCont)
- InfUserExtensionItemObj.SetSupArchList(IdContentItem[2])
-
+ InfUserExtensionItemObj.SetSupArchList(IdContentItem[2])
+
# for CheckItem in self.UserExtension:
# if IdContentItem[0] == CheckItem[0] and IdContentItem[1] == CheckItem[1]:
# if IdContentItem[2].upper() == 'COMMON' or CheckItem[2].upper() == 'COMMON':
# #
# # For COMMON ARCH type, do special check.
# #
-# Logger.Error('InfParser',
+# Logger.Error('InfParser',
# ToolError.FORMAT_INVALID,
# ST.ERR_INF_PARSER_UE_SECTION_DUPLICATE_ERROR%\
# (IdContentItem[0] + '.' + IdContentItem[1] + '.' + IdContentItem[2]),
-# File=GlobalData.gINF_MODULE_NAME,
+# File=GlobalData.gINF_MODULE_NAME,
# Line=LineNo,
# ExtraData=None)
-
+
if IdContentItem in self.UserExtension:
#
- # Each UserExtensions section header must have a unique set
+ # Each UserExtensions section header must have a unique set
# of UserId, IdString and Arch values.
- # This means that the same UserId can be used in more than one
- # section header, provided the IdString or Arch values are
- # different. The same IdString values can be used in more than
- # one section header if the UserId or Arch values are
- # different. The same UserId and the same IdString can be used
- # in a section header if the Arch values are different in each
+ # This means that the same UserId can be used in more than one
+ # section header, provided the IdString or Arch values are
+ # different. The same IdString values can be used in more than
+ # one section header if the UserId or Arch values are
+ # different. The same UserId and the same IdString can be used
+ # in a section header if the Arch values are different in each
# of the section headers.
#
- Logger.Error('InfParser',
+ Logger.Error('InfParser',
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_UE_SECTION_DUPLICATE_ERROR%\
(IdContentItem[0] + '.' + IdContentItem[1] + '.' + IdContentItem[2]),
- File=GlobalData.gINF_MODULE_NAME,
+ File=GlobalData.gINF_MODULE_NAME,
Line=LineNo,
ExtraData=None)
else:
UserExtensionList = []
UserExtensionList.append(InfUserExtensionItemObj)
self.UserExtension[IdContentItem] = UserExtensionList
-
+
return True
-
+
def GetUserExtension(self):
return self.UserExtension
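
SetUserExtension() above enforces that every [UserExtensions] section header carries a unique (UserId, IdString, Arch) triple. The sketch below expresses that rule with a plain dict keyed by the triple instead of the Sdict used in the UPT code; the UserId/IdString values in the example are illustrative.

def add_user_extension(table, user_id, id_string, arch, content):
    key = (user_id, id_string, arch)
    if key in table:
        raise ValueError('duplicate UserExtensions header: %s.%s.%s'
                         % (user_id, id_string, arch))
    table[key] = content
    return table

if __name__ == '__main__':
    sections = {}
    add_user_extension(sections, 'TianoCore', '"ExtraFiles"', 'COMMON', 'Example.uni')
    add_user_extension(sections, 'TianoCore', '"ExtraFiles"', 'IA32', 'ExampleIa32.uni')
    print(sorted(sections))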
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/__init__.py b/BaseTools/Source/Python/UPT/Object/Parser/__init__.py
index b457c3c97b..f348a9b7fd 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/__init__.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/__init__.py
@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
PARSER
-''' \ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/UPT/Object/__init__.py b/BaseTools/Source/Python/UPT/Object/__init__.py
index 7925ab65ba..c13c5bfde6 100644
--- a/BaseTools/Source/Python/UPT/Object/__init__.py
+++ b/BaseTools/Source/Python/UPT/Object/__init__.py
@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
Object
-''' \ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/UPT/Parser/DecParser.py b/BaseTools/Source/Python/UPT/Parser/DecParser.py
index 7ac0dfa1ed..a88b51d055 100644
--- a/BaseTools/Source/Python/UPT/Parser/DecParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/DecParser.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -75,31 +75,31 @@ class _DecBase:
# Data parsed by 'self' are saved to this object
#
self.ItemObject = None
-
+
def GetDataObject(self):
return self.ItemObject
-
+
def GetLocalMacro(self):
return self._LocalMacro
-
+
## BlockStart
#
# Called if a new section starts
#
def BlockStart(self):
self._LocalMacro = {}
-
+
## _CheckReDefine
#
# @param Key: to be checked if multi-defined
- # @param Scope: Format: [[SectionName, Arch], ...].
+ # @param Scope: Format: [[SectionName, Arch], ...].
# If scope is none, use global scope
#
def _CheckReDefine(self, Key, Scope = None):
if not Scope:
Scope = self._RawData.CurrentScope
return
-
+
SecArch = []
#
# Copy scope to SecArch, avoid Scope be changed outside
@@ -108,7 +108,7 @@ class _DecBase:
if Key not in self._ItemDict:
self._ItemDict[Key] = [[SecArch, self._RawData.LineIndex]]
return
-
+
for Value in self._ItemDict[Key]:
for SubValue in Scope:
#
@@ -132,18 +132,18 @@ class _DecBase:
self._LoggerError(ST.ERR_DECPARSE_REDEFINE % (Key, Value[1]))
return
self._ItemDict[Key].append([SecArch, self._RawData.LineIndex])
-
+
## CheckRequiredFields
# Some sections need to check if some fields exist, define section for example
# Derived class can re-implement, top parser will call this function after all parsing done
- #
+ #
def CheckRequiredFields(self):
if self._RawData:
pass
return True
-
+
## IsItemRequired
- # In DEC spec, sections must have at least one statement except user
+ # In DEC spec, sections must have at least one statement except user
# extension.
# For example: "[guids" [<attribs>] "]" <EOL> <statements>+
# sub class can override this method to indicate if statement is a must.
@@ -152,12 +152,12 @@ class _DecBase:
if self._RawData:
pass
return False
-
+
def _LoggerError(self, ErrorString):
- Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
Line = self._RawData.LineIndex,
ExtraData=ErrorString + ST.ERR_DECPARSE_LINE % self._RawData.CurrentLine)
-
+
def _ReplaceMacro(self, String):
if gMACRO_PATTERN.findall(String):
String = ReplaceMacro(String, self._LocalMacro, False,
@@ -169,11 +169,11 @@ class _DecBase:
MacroUsed = gMACRO_PATTERN.findall(String)
if MacroUsed:
Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE,
- File=self._RawData.Filename,
+ File=self._RawData.Filename,
Line = self._RawData.LineIndex,
ExtraData = ST.ERR_DECPARSE_MACRO_RESOLVE % (str(MacroUsed), String))
return String
-
+
def _MacroParser(self, String):
TokenList = GetSplitValueList(String, ' ', 1)
if len(TokenList) < 2 or TokenList[1] == '':
@@ -184,7 +184,7 @@ class _DecBase:
self._LoggerError(ST.ERR_DECPARSE_MACRO_NAME)
elif not IsValidToken(MACRO_PATTERN, TokenList[0]):
self._LoggerError(ST.ERR_DECPARSE_MACRO_NAME_UPPER % TokenList[0])
-
+
if len(TokenList) == 1:
self._LocalMacro[TokenList[0]] = ''
else:
@@ -202,7 +202,7 @@ class _DecBase:
#
return None
-
+
## _TailCommentStrategy
#
# This function can be derived to parse tail comment
@@ -216,7 +216,7 @@ class _DecBase:
if self._RawData:
pass
return False
-
+
## _StopCurrentParsing
#
# Called in Parse if current parsing should be stopped when encounter some
@@ -229,7 +229,7 @@ class _DecBase:
if self._RawData:
pass
return Line[0] == DT.TAB_SECTION_START and Line[-1] == DT.TAB_SECTION_END
-
+
## _TryBackSlash
#
# Split comment and DEC content, concatenate lines if end of char is '\'
@@ -246,7 +246,7 @@ class _DecBase:
if Line == '':
self._LoggerError(ST.ERR_DECPARSE_BACKSLASH_EMPTY)
break
-
+
if Comment:
CommentList.append((Comment, self._RawData.LineIndex))
if Line[-1] != DT.TAB_SLASH:
@@ -269,7 +269,7 @@ class _DecBase:
if not Line or Line[-1] == DT.TAB_SLASH:
self._LoggerError(ST.ERR_DECPARSE_BACKSLASH_EMPTY)
CatLine += Line
-
+
#
# All MACRO values defined by the DEFINE statements in any section
# (except [Userextensions] sections for Intel) of the INF or DEC file
@@ -286,17 +286,17 @@ class _DecBase:
self._RawData.CurrentLine = CatLine
return CatLine, CommentList
-
+
## Parse
- # This is a template method in which other member functions which might
- # override by sub class are called. It is responsible for reading file
+ # This is a template method in which other member functions which might
+ # override by sub class are called. It is responsible for reading file
# line by line, and call other member functions to parse. This function
# should not be re-implement by sub class.
#
def Parse(self):
HeadComments = []
TailComments = []
-
+
#======================================================================
# CurComments may pointer to HeadComments or TailComments
#======================================================================
@@ -304,7 +304,7 @@ class _DecBase:
CurObj = None
ItemNum = 0
FromBuf = False
-
+
#======================================================================
# Used to report error information if empty section found
#======================================================================
@@ -333,20 +333,20 @@ class _DecBase:
# Set tail comments to previous statement if not empty.
#==========================================================
CurObj.SetTailComment(CurObj.GetTailComment()+TailComments)
-
+
if not FromBuf:
del TailComments[:]
CurComments = TailComments
Comments = []
if Comment:
Comments = [(Comment, self._RawData.LineIndex)]
-
+
#==============================================================
# Try if last char of line has backslash
#==============================================================
Line, Comments = self._TryBackSlash(Line, Comments)
CurComments.extend(Comments)
-
+
#==============================================================
# Macro found
#==============================================================
@@ -356,7 +356,7 @@ class _DecBase:
del TailComments[:]
CurComments = HeadComments
continue
-
+
if self._StopCurrentParsing(Line):
#==========================================================
# This line does not belong to this parse,
@@ -364,7 +364,7 @@ class _DecBase:
#==========================================================
self._RawData.SetNext(Line, HeadComments, TailComments)
break
-
+
Obj = self._ParseItem()
ItemNum += 1
if Obj:
@@ -387,7 +387,7 @@ class _DecBase:
CurComments.append(((Comment, self._RawData.LineIndex)))
else:
del CurComments[:]
-
+
if self._IsStatementRequired() and ItemNum == 0:
Logger.Error(
TOOL_NAME, FILE_PARSE_FAILURE,
@@ -405,7 +405,7 @@ class _DecDefine(_DecBase):
self.ItemObject = DecDefineObject(RawData.Filename)
self._LocalMacro = self._RawData.Macros
self._DefSecNum = 0
-
+
#
# Each field has a function to validate
#
@@ -416,12 +416,12 @@ class _DecDefine(_DecBase):
DT.TAB_DEC_DEFINES_PACKAGE_VERSION : self._SetPackageVersion,
DT.TAB_DEC_DEFINES_PKG_UNI_FILE : self._SetPackageUni,
}
-
+
def BlockStart(self):
self._DefSecNum += 1
if self._DefSecNum > 1:
self._LoggerError(ST.ERR_DECPARSE_DEFINE_MULTISEC)
-
+
## CheckRequiredFields
#
# Check required fields: DEC_SPECIFICATION, PACKAGE_NAME
@@ -430,21 +430,21 @@ class _DecDefine(_DecBase):
def CheckRequiredFields(self):
Ret = False
if self.ItemObject.GetPackageSpecification() == '':
- Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
ExtraData=ST.ERR_DECPARSE_DEFINE_REQUIRED % DT.TAB_DEC_DEFINES_DEC_SPECIFICATION)
elif self.ItemObject.GetPackageName() == '':
- Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
ExtraData=ST.ERR_DECPARSE_DEFINE_REQUIRED % DT.TAB_DEC_DEFINES_PACKAGE_NAME)
elif self.ItemObject.GetPackageGuid() == '':
- Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
ExtraData=ST.ERR_DECPARSE_DEFINE_REQUIRED % DT.TAB_DEC_DEFINES_PACKAGE_GUID)
elif self.ItemObject.GetPackageVersion() == '':
- Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
ExtraData=ST.ERR_DECPARSE_DEFINE_REQUIRED % DT.TAB_DEC_DEFINES_PACKAGE_VERSION)
else:
Ret = True
return Ret
-
+
def _ParseItem(self):
Line = self._RawData.CurrentLine
TokenList = GetSplitValueList(Line, DT.TAB_EQUAL_SPLIT, 1)
@@ -456,13 +456,13 @@ class _DecDefine(_DecBase):
self._LoggerError(ST.ERR_DECPARSE_DEFINE_UNKNOWKEY % TokenList[0])
else:
self.DefineValidation[TokenList[0]](TokenList[1])
-
+
DefineItem = DecDefineItemObject()
DefineItem.Key = TokenList[0]
DefineItem.Value = TokenList[1]
self.ItemObject.AddItem(DefineItem, self._RawData.CurrentScope)
return DefineItem
-
+
def _SetDecSpecification(self, Token):
if self.ItemObject.GetPackageSpecification():
self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_DEC_SPECIFICATION)
@@ -470,21 +470,21 @@ class _DecDefine(_DecBase):
if not IsValidDecVersionVal(Token):
self._LoggerError(ST.ERR_DECPARSE_DEFINE_SPEC)
self.ItemObject.SetPackageSpecification(Token)
-
+
def _SetPackageName(self, Token):
if self.ItemObject.GetPackageName():
self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_PACKAGE_NAME)
if not IsValidWord(Token):
self._LoggerError(ST.ERR_DECPARSE_DEFINE_PKGNAME)
self.ItemObject.SetPackageName(Token)
-
+
def _SetPackageGuid(self, Token):
if self.ItemObject.GetPackageGuid():
self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_PACKAGE_GUID)
if not CheckGuidRegFormat(Token):
self._LoggerError(ST.ERR_DECPARSE_DEFINE_PKGGUID)
self.ItemObject.SetPackageGuid(Token)
-
+
def _SetPackageVersion(self, Token):
if self.ItemObject.GetPackageVersion():
self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_PACKAGE_VERSION)
@@ -494,7 +494,7 @@ class _DecDefine(_DecBase):
if not DT.TAB_SPLIT in Token:
Token = Token + '.0'
self.ItemObject.SetPackageVersion(Token)
-
+
def _SetPackageUni(self, Token):
if self.ItemObject.GetPackageUniFile():
self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_PKG_UNI_FILE)
@@ -508,13 +508,13 @@ class _DecInclude(_DecBase):
def __init__(self, RawData):
_DecBase.__init__(self, RawData)
self.ItemObject = DecIncludeObject(RawData.Filename)
-
+
def _ParseItem(self):
Line = self._RawData.CurrentLine
-
+
if not IsValidPath(Line, self._RawData.PackagePath):
- self._LoggerError(ST.ERR_DECPARSE_INCLUDE % Line)
-
+ self._LoggerError(ST.ERR_DECPARSE_INCLUDE % Line)
+
Item = DecIncludeItemObject(StripRoot(self._RawData.PackagePath, Line), self._RawData.PackagePath)
self.ItemObject.AddItem(Item, self._RawData.CurrentScope)
return Item
@@ -527,32 +527,32 @@ class _DecLibraryclass(_DecBase):
def __init__(self, RawData):
_DecBase.__init__(self, RawData)
self.ItemObject = DecLibraryclassObject(RawData.Filename)
-
+
def _ParseItem(self):
Line = self._RawData.CurrentLine
TokenList = GetSplitValueList(Line, DT.TAB_VALUE_SPLIT)
if len(TokenList) != 2:
- self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_SPLIT)
+ self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_SPLIT)
if TokenList[0] == '' or TokenList[1] == '':
self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_EMPTY)
if not IsValidToken('[A-Z][0-9A-Za-z]*', TokenList[0]):
self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_LIB)
-
+
self._CheckReDefine(TokenList[0])
-
+
Value = TokenList[1]
#
# Must end with .h
#
if not Value.endswith('.h'):
self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_PATH_EXT)
-
+
#
# Path must be existed
#
if not IsValidPath(Value, self._RawData.PackagePath):
self._LoggerError(ST.ERR_DECPARSE_INCLUDE % Value)
-
+
Item = DecLibraryclassItemObject(TokenList[0], StripRoot(self._RawData.PackagePath, Value),
self._RawData.PackagePath)
self.ItemObject.AddItem(Item, self._RawData.CurrentScope)
@@ -571,38 +571,38 @@ class _DecPcd(_DecBase):
# Key is token space and token number (integer), value is C name
#
self.TokenMap = {}
-
+
def _ParseItem(self):
Line = self._RawData.CurrentLine
TokenList = Line.split(DT.TAB_VALUE_SPLIT)
if len(TokenList) < 4:
self._LoggerError(ST.ERR_DECPARSE_PCD_SPLIT)
-
+
#
# Token space guid C name
#
PcdName = GetSplitValueList(TokenList[0], DT.TAB_SPLIT)
if len(PcdName) != 2 or PcdName[0] == '' or PcdName[1] == '':
self._LoggerError(ST.ERR_DECPARSE_PCD_NAME)
-
+
Guid = PcdName[0]
if not IsValidToken(CVAR_PATTERN, Guid):
self._LoggerError(ST.ERR_DECPARSE_PCD_CVAR_GUID)
-
+
#
# PCD C name
#
CName = PcdName[1]
if not IsValidToken(CVAR_PATTERN, CName):
self._LoggerError(ST.ERR_DECPARSE_PCD_CVAR_PCDCNAME)
-
+
self._CheckReDefine(Guid + DT.TAB_SPLIT + CName)
-
+
#
# Default value, may be C array, string or number
#
Data = DT.TAB_VALUE_SPLIT.join(TokenList[1:-2]).strip()
-
+
#
# PCD data type
#
@@ -623,18 +623,18 @@ class _DecPcd(_DecBase):
if long(Token) > 4294967295:
self._LoggerError(ST.ERR_DECPARSE_PCD_TOKEN_INT % Token)
Token = hex(long(Token))[:-1]
-
+
IntToken = long(Token, 0)
if (Guid, IntToken) in self.TokenMap:
if self.TokenMap[Guid, IntToken] != CName:
self._LoggerError(ST.ERR_DECPARSE_PCD_TOKEN_UNIQUE%(Token))
else:
self.TokenMap[Guid, IntToken] = CName
-
+
Item = DecPcdItemObject(Guid, CName, Data, DataType, Token)
self.ItemObject.AddItem(Item, self._RawData.CurrentScope)
return Item
-
+
## _DecGuid
#
# Parse GUID, PPI, Protocol section
@@ -651,21 +651,21 @@ class _DecGuid(_DecBase):
DT.TAB_PPIS.upper() : self.PpiObj,
DT.TAB_PROTOCOLS.upper() : self.ProtocolObj
}
-
+
def GetDataObject(self):
if self._RawData.CurrentScope:
return self.ObjectDict[self._RawData.CurrentScope[0][0]]
return None
-
+
def GetGuidObject(self):
return self.GuidObj
-
+
def GetPpiObject(self):
return self.PpiObj
-
+
def GetProtocolObject(self):
return self.ProtocolObj
-
+
def _ParseItem(self):
Line = self._RawData.CurrentLine
TokenList = GetSplitValueList(Line, DT.TAB_EQUAL_SPLIT, 1)
@@ -677,9 +677,9 @@ class _DecGuid(_DecBase):
self._LoggerError(ST.ERR_DECPARSE_CGUID_GUID)
if not IsValidToken(CVAR_PATTERN, TokenList[0]):
self._LoggerError(ST.ERR_DECPARSE_PCD_CVAR_GUID)
-
+
self._CheckReDefine(TokenList[0])
-
+
if TokenList[1][0] != '{':
if not CheckGuidRegFormat(TokenList[1]):
self._LoggerError(ST.ERR_DECPARSE_DEFINE_PKGGUID)
@@ -691,7 +691,7 @@ class _DecGuid(_DecBase):
GuidString = GuidStructureStringToGuidString(TokenList[1])
if TokenList[1][0] != '{' or TokenList[1][-1] != '}' or GuidString == '':
self._LoggerError(ST.ERR_DECPARSE_CGUID_GUIDFORMAT)
-
+
#
# Check C format GUID
#
@@ -713,7 +713,7 @@ class _DecUserExtension(_DecBase):
self.ItemObject = DecUserExtensionObject(RawData.Filename)
self._Headers = []
self._CurItems = []
-
+
def BlockStart(self):
self._CurItems = []
for Header in self._RawData.CurrentScope:
@@ -721,7 +721,7 @@ class _DecUserExtension(_DecBase):
self._LoggerError(ST.ERR_DECPARSE_UE_DUPLICATE)
else:
self._Headers.append(Header)
-
+
for Item in self._CurItems:
if Item.UserId == Header[1] and Item.IdString == Header[2]:
Item.ArchAndModuleType.append(Header[3])
@@ -734,7 +734,7 @@ class _DecUserExtension(_DecBase):
self._CurItems.append(Item)
self.ItemObject.AddItem(Item, None)
self._LocalMacro = {}
-
+
def _ParseItem(self):
Line = self._RawData.CurrentLine
Item = None
@@ -749,8 +749,8 @@ class _DecUserExtension(_DecBase):
#
# Top dec parser
#
-class Dec(_DecBase, _DecComments):
- def __init__(self, DecFile, Parse = True):
+class Dec(_DecBase, _DecComments):
+ def __init__(self, DecFile, Parse = True):
try:
Content = ConvertSpecialChar(open(DecFile, 'rb').readlines())
except BaseException:
@@ -776,20 +776,20 @@ class Dec(_DecBase, _DecComments):
NewContent.append(Line + '\r')
RawData = FileContent(DecFile, NewContent)
-
+
_DecComments.__init__(self)
_DecBase.__init__(self, RawData)
-
+
self.BinaryHeadComment = []
self.PcdErrorCommentDict = {}
-
+
self._Define = _DecDefine(RawData)
self._Include = _DecInclude(RawData)
self._Guid = _DecGuid(RawData)
self._LibClass = _DecLibraryclass(RawData)
self._Pcd = _DecPcd(RawData)
self._UserEx = _DecUserExtension(RawData)
-
+
#
# DEC file supported data types (one type per section)
#
@@ -815,7 +815,7 @@ class Dec(_DecBase, _DecComments):
# Parsing done, check required fields
#
self.CheckRequiredFields()
-
+
def CheckRequiredFields(self):
for SectionParser in self._SectionParser.values():
if not SectionParser.CheckRequiredFields():
@@ -831,7 +831,7 @@ class Dec(_DecBase, _DecComments):
FileHeaderLineIndex = -1
BinaryHeaderLineIndex = -1
TokenSpaceGuidCName = ''
-
+
#
# Parse PCD error comment section
#
@@ -853,13 +853,13 @@ class Dec(_DecBase, _DecComments):
self._RawData.CurrentLine = self._RawData.CurrentLine.replace(DT.TAB_COMMENT_SPLIT, '').strip()
if self._RawData.CurrentLine != '':
if DT.TAB_VALUE_SPLIT not in self._RawData.CurrentLine:
- self._LoggerError(ST.ERR_DECPARSE_PCDERRORMSG_MISS_VALUE_SPLIT)
-
+ self._LoggerError(ST.ERR_DECPARSE_PCDERRORMSG_MISS_VALUE_SPLIT)
+
PcdErrorNumber, PcdErrorMsg = GetSplitValueList(self._RawData.CurrentLine, DT.TAB_VALUE_SPLIT, 1)
PcdErrorNumber = ParsePcdErrorCode(PcdErrorNumber, self._RawData.Filename, self._RawData.LineIndex)
if not PcdErrorMsg.strip():
self._LoggerError(ST.ERR_DECPARSE_PCD_MISS_ERRORMSG)
-
+
self.PcdErrorCommentDict[(TokenSpaceGuidCName, PcdErrorNumber)] = PcdErrorMsg.strip()
else:
TokenSpaceGuidCName = ''
@@ -870,27 +870,27 @@ class Dec(_DecBase, _DecComments):
while not self._RawData.IsEndOfFile():
Line, Comment = CleanString(self._RawData.GetNextLine())
-
+
#
# Header must be pure comment
#
if Line != '':
self._RawData.UndoNextLine()
break
-
+
if Comment and Comment.startswith(DT.TAB_SPECIAL_COMMENT) and Comment.find(DT.TAB_HEADER_COMMENT) > 0 \
and not Comment[2:Comment.find(DT.TAB_HEADER_COMMENT)].strip():
IsFileHeader = True
IsBinaryHeader = False
FileHeaderLineIndex = self._RawData.LineIndex
-
+
+ #
+ # Get license information before '@file'
#
- # Get license information before '@file'
- #
if not IsFileHeader and not IsBinaryHeader and Comment and Comment.startswith(DT.TAB_COMMENT_SPLIT) and \
DT.TAB_BINARY_HEADER_COMMENT not in Comment:
self._HeadComment.append((Comment, self._RawData.LineIndex))
-
+
if Comment and IsFileHeader and \
not(Comment.startswith(DT.TAB_SPECIAL_COMMENT) \
and Comment.find(DT.TAB_BINARY_HEADER_COMMENT) > 0):
@@ -899,15 +899,15 @@ class Dec(_DecBase, _DecComments):
# Double '#' indicates end of header comments
#
if (not Comment or Comment == DT.TAB_SPECIAL_COMMENT) and IsFileHeader:
- IsFileHeader = False
+ IsFileHeader = False
continue
-
+
if Comment and Comment.startswith(DT.TAB_SPECIAL_COMMENT) \
and Comment.find(DT.TAB_BINARY_HEADER_COMMENT) > 0:
IsBinaryHeader = True
IsFileHeader = False
BinaryHeaderLineIndex = self._RawData.LineIndex
-
+
if Comment and IsBinaryHeader:
self.BinaryHeadComment.append((Comment, self._RawData.LineIndex))
#
@@ -916,23 +916,23 @@ class Dec(_DecBase, _DecComments):
if (not Comment or Comment == DT.TAB_SPECIAL_COMMENT) and IsBinaryHeader:
IsBinaryHeader = False
break
-
+
if FileHeaderLineIndex > -1 and not IsFileHeader and not IsBinaryHeader:
break
if FileHeaderLineIndex > BinaryHeaderLineIndex and FileHeaderLineIndex > -1 and BinaryHeaderLineIndex > -1:
self._LoggerError(ST.ERR_BINARY_HEADER_ORDER)
-
+
if FileHeaderLineIndex == -1:
# self._LoggerError(ST.ERR_NO_SOURCE_HEADER)
- Logger.Error(TOOL_NAME, FORMAT_INVALID,
+ Logger.Error(TOOL_NAME, FORMAT_INVALID,
ST.ERR_NO_SOURCE_HEADER,
File=self._RawData.Filename)
return
-
+
def _StopCurrentParsing(self, Line):
return False
-
+
def _ParseItem(self):
self._SectionHeaderParser()
if len(self._RawData.CurrentScope) == 0:
@@ -955,8 +955,8 @@ class Dec(_DecBase, _DecComments):
if Token.upper() != DT.TAB_USER_EXTENSIONS.upper():
self._LoggerError(ST.ERR_DECPARSE_SECTION_UE)
UserExtension = Token.upper()
- Par.AssertChar(DT.TAB_SPLIT, ST.ERR_DECPARSE_SECTION_UE, self._RawData.LineIndex)
-
+ Par.AssertChar(DT.TAB_SPLIT, ST.ERR_DECPARSE_SECTION_UE, self._RawData.LineIndex)
+
#
# UserID
#
@@ -991,7 +991,7 @@ class Dec(_DecBase, _DecComments):
Par.AssertEnd(ST.ERR_DECPARSE_SECTION_UE, self._RawData.LineIndex)
if 'COMMON' in ArchList and len(ArchList) > 1:
self._LoggerError(ST.ERR_DECPARSE_SECTION_COMMON)
-
+
## Section header parser
#
# The section header is always in following format:
@@ -1001,7 +1001,7 @@ class Dec(_DecBase, _DecComments):
def _SectionHeaderParser(self):
if self._RawData.CurrentLine[0] != DT.TAB_SECTION_START or self._RawData.CurrentLine[-1] != DT.TAB_SECTION_END:
self._LoggerError(ST.ERR_DECPARSE_SECTION_IDENTIFY)
-
+
RawSection = self._RawData.CurrentLine[1:-1].strip().upper()
#
# Check defines section which is only allowed to occur once and
@@ -1039,7 +1039,7 @@ class Dec(_DecBase, _DecComments):
self._LoggerError(ST.ERR_DECPARSE_SECTION_SUBTOOMANY % Item)
if DT.TAB_PCDS_FEATURE_FLAG_NULL.upper() in SectionNames and len(SectionNames) > 1:
- self._LoggerError(ST.ERR_DECPARSE_SECTION_FEATUREFLAG % DT.TAB_PCDS_FEATURE_FLAG_NULL)
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_FEATUREFLAG % DT.TAB_PCDS_FEATURE_FLAG_NULL)
#
# S1 is always Arch
#
@@ -1064,7 +1064,7 @@ class Dec(_DecBase, _DecComments):
for Sec in SectionNames:
if not Sec.startswith(DT.TAB_PCDS.upper()):
self._LoggerError(ST.ERR_DECPARSE_SECTION_NAME % str(SectionNames))
-
+
def GetDefineSectionMacro(self):
return self._Define.GetLocalMacro()
def GetDefineSectionObject(self):
@@ -1084,11 +1084,11 @@ class Dec(_DecBase, _DecComments):
def GetUserExtensionSectionObject(self):
return self._UserEx.GetDataObject()
def GetPackageSpecification(self):
- return self._Define.GetDataObject().GetPackageSpecification()
+ return self._Define.GetDataObject().GetPackageSpecification()
def GetPackageName(self):
- return self._Define.GetDataObject().GetPackageName()
+ return self._Define.GetDataObject().GetPackageName()
def GetPackageGuid(self):
- return self._Define.GetDataObject().GetPackageGuid()
+ return self._Define.GetDataObject().GetPackageGuid()
def GetPackageVersion(self):
return self._Define.GetDataObject().GetPackageVersion()
def GetPackageUniFile(self):
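
_DecPcd._ParseItem() in the DecParser.py hunks above splits a DEC PCD statement of the form <TokenSpaceGuidCName>.<TokenCName>|<Value>|<DatumType>|<Token>, rejoining any '|' characters that belong to the value. The standalone sketch below mirrors that split; validation is reduced to a C-name regex, and the token-space name in the example is illustrative.

import re

CVAR = re.compile(r'^[A-Za-z_][A-Za-z0-9_]*$')

def parse_dec_pcd(line):
    parts = line.split('|')
    if len(parts) < 4:
        raise ValueError('PCD entry needs name|value|type|token: %s' % line)
    guid_cname, _, pcd_cname = parts[0].strip().partition('.')
    if not (CVAR.match(guid_cname) and CVAR.match(pcd_cname)):
        raise ValueError('invalid PCD name: %s' % parts[0])
    value = '|'.join(parts[1:-2]).strip()   # value may itself contain '|'
    datum_type = parts[-2].strip()
    token = int(parts[-1].strip(), 0)       # accepts decimal or 0x... hex
    return guid_cname, pcd_cname, value, datum_type, token

if __name__ == '__main__':
    print(parse_dec_pcd('gExampleTokenSpaceGuid.PcdExampleValue|0x10|UINT32|0x00000001'))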
diff --git a/BaseTools/Source/Python/UPT/Parser/DecParserMisc.py b/BaseTools/Source/Python/UPT/Parser/DecParserMisc.py
index 22a50680fb..c5c35ede78 100644
--- a/BaseTools/Source/Python/UPT/Parser/DecParserMisc.py
+++ b/BaseTools/Source/Python/UPT/Parser/DecParserMisc.py
@@ -1,11 +1,11 @@
## @file
# This file is used to define helper class and function for DEC parser
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -104,7 +104,7 @@ def StripRoot(Root, Path):
# Remove spaces
#
# @param Line: The string to be cleaned
-# @param CommentCharacter: Comment char, used to ignore comment content,
+# @param CommentCharacter: Comment char, used to ignore comment content,
# default is DataType.TAB_COMMENT_SPLIT
#
def CleanString(Line, CommentCharacter=TAB_COMMENT_SPLIT, \
@@ -291,7 +291,7 @@ def IsValidPcdDatum(Type, Value):
Valid, Cause = IsValidLogicalExpr(Value, True)
if not Valid:
return False, Cause
-
+
return True, ""
## ParserHelper
@@ -366,7 +366,7 @@ class ParserHelper:
## AssertChar
#
- # Assert char at current index of string is AssertChar, or will report
+ # Assert char at current index of string is AssertChar, or will report
# error message
#
# @param AssertChar: AssertChar
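
The DecParserMisc.py hunk above touches ParserHelper.AssertChar(), part of a cursor-style helper used to walk section headers such as [UserExtensions.TianoCore."ExtraFiles"] one token at a time. The class below is only a rough sketch of that style of helper; its method names and behaviour are simplified stand-ins, not the real ParserHelper API.

class Cursor:
    def __init__(self, text):
        self.text = text
        self.pos = 0

    def skip_space(self):
        while self.pos < len(self.text) and self.text[self.pos].isspace():
            self.pos += 1

    def assert_char(self, char, message):
        # Fail with an offset if the next non-space character is not 'char'.
        self.skip_space()
        if self.pos >= len(self.text) or self.text[self.pos] != char:
            raise ValueError('%s at offset %d' % (message, self.pos))
        self.pos += 1

    def get_token(self, stop_chars='.]'):
        self.skip_space()
        start = self.pos
        while self.pos < len(self.text) and self.text[self.pos] not in stop_chars:
            self.pos += 1
        return self.text[start:self.pos].strip()

if __name__ == '__main__':
    cursor = Cursor('[UserExtensions.TianoCore]')
    cursor.assert_char('[', 'expected section start')
    print(cursor.get_token())      # UserExtensions
    cursor.assert_char('.', 'expected "."')
    print(cursor.get_token())      # TianoCore
    cursor.assert_char(']', 'expected section end')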
diff --git a/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py b/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
index 760f28a41f..029a436cec 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -133,7 +133,7 @@ def GetPackageListInfo(FileNameString, WorkSpace, LineNo):
continue
#
- # Found [Packages] section
+ # Found [Packages] section
#
if RePackageHeader.match(Line):
PackageHederFlag = True
@@ -174,7 +174,7 @@ def GetPackageListInfo(FileNameString, WorkSpace, LineNo):
#
# Replace with Local section Macro and [Defines] section Macro.
- #
+ #
Line = InfExpandMacro(Line, (FileNameString, Line, LineNo), DefineSectionMacros, PackageSectionMacros, True)
Line = GetSplitValueList(Line, "#", 1)[0]
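
GetPackageListInfo() in the InfAsBuiltProcess.py hunks above scans an INF for its [Packages] section and collects entries until the next section header. The sketch below shows that scan in isolation; the regexes are illustrative, comment stripping is reduced to splitting on '#', and the DEFINE macro expansion done by the real code is omitted.

import re

PACKAGES_HEADER = re.compile(r'^\s*\[\s*Packages\b', re.IGNORECASE)
ANY_HEADER = re.compile(r'^\s*\[')

def collect_packages(inf_lines):
    packages = []
    in_packages = False
    for raw in inf_lines:
        line = raw.split('#', 1)[0].strip()   # drop trailing comments
        if not line:
            continue
        if PACKAGES_HEADER.match(line):
            in_packages = True
            continue
        if ANY_HEADER.match(line):
            in_packages = False
            continue
        if in_packages:
            packages.append(line)
    return packages

if __name__ == '__main__':
    sample = ['[Defines]', '  INF_VERSION = 0x00010005', '[Packages]',
              '  MdePkg/MdePkg.dec  # comment', '[Sources]', '  Example.c']
    print(collect_packages(sample))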
diff --git a/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py
index f220402cb5..43cdeee3a7 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py
@@ -1,11 +1,11 @@
## @file
-# This file contained the parser for [Binaries] sections in INF file
+# This file contained the parser for [Binaries] sections in INF file
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -35,7 +35,7 @@ class InfBinarySectionParser(InfParserSectionRoot):
#
def InfBinaryParser(self, SectionString, InfSectionObject, FileName):
#
- # Macro defined in this section
+ # Macro defined in this section
#
SectionMacros = {}
ValueList = []
@@ -56,8 +56,8 @@ class InfBinarySectionParser(InfParserSectionRoot):
StillCommentFalg = False
HeaderComments = []
- LineComment = None
-
+ LineComment = None
+
AllSectionContent = ''
#
# Parse section content
@@ -65,16 +65,16 @@ class InfBinarySectionParser(InfParserSectionRoot):
for Line in SectionString:
BinLineContent = Line[0]
BinLineNo = Line[1]
-
+
if BinLineContent.strip() == '':
continue
-
+
CurrentLineObj = CurrentLine()
CurrentLineObj.FileName = FileName
CurrentLineObj.LineString = BinLineContent
CurrentLineObj.LineNo = BinLineNo
#
- # Found Header Comments
+ # Found Header Comments
#
if BinLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
#
@@ -85,7 +85,7 @@ class InfBinarySectionParser(InfParserSectionRoot):
AllSectionContent += BinLineContent + DT.END_OF_LINE
continue
#
- # First time encounter comment
+ # First time encounter comment
#
else:
#
@@ -98,14 +98,14 @@ class InfBinarySectionParser(InfParserSectionRoot):
continue
else:
StillCommentFalg = False
-
+
if len(HeaderComments) >= 1:
LineComment = InfLineCommentObject()
LineCommentContent = ''
for Item in HeaderComments:
LineCommentContent += Item[0] + DT.END_OF_LINE
LineComment.SetHeaderComments(LineCommentContent)
-
+
#
# Find Tail comment.
#
@@ -114,8 +114,8 @@ class InfBinarySectionParser(InfParserSectionRoot):
BinLineContent = BinLineContent[:BinLineContent.find(DT.TAB_COMMENT_SPLIT)]
if LineComment is None:
LineComment = InfLineCommentObject()
- LineComment.SetTailComments(TailComments)
-
+ LineComment.SetTailComments(TailComments)
+
#
# Find Macro
#
@@ -126,22 +126,22 @@ class InfBinarySectionParser(InfParserSectionRoot):
if MacroDef[0] is not None:
SectionMacros[MacroDef[0]] = MacroDef[1]
LineComment = None
- HeaderComments = []
+ HeaderComments = []
continue
-
+
#
# Replace with Local section Macro and [Defines] section Macro.
- #
- LineContent = InfExpandMacro(BinLineContent,
- (FileName, BinLineContent, BinLineNo),
- self.FileLocalMacros,
+ #
+ LineContent = InfExpandMacro(BinLineContent,
+ (FileName, BinLineContent, BinLineNo),
+ self.FileLocalMacros,
SectionMacros, True)
-
- AllSectionContent += LineContent + DT.END_OF_LINE
+
+ AllSectionContent += LineContent + DT.END_OF_LINE
TokenList = GetSplitValueList(LineContent, DT.TAB_VALUE_SPLIT, 1)
ValueList[0:len(TokenList)] = TokenList
-
- #
+
+ #
# Should equal to UI/SEC_UI/UNI_UI
#
ValueList[0] = ValueList[0].strip()
@@ -149,84 +149,84 @@ class InfBinarySectionParser(InfParserSectionRoot):
ValueList[0] == DT.BINARY_FILE_TYPE_SEC_UI or \
ValueList[0] == DT.BINARY_FILE_TYPE_UI:
if len(ValueList) == 2:
- TokenList = GetSplitValueList(ValueList[1],
- DT.TAB_VALUE_SPLIT,
+ TokenList = GetSplitValueList(ValueList[1],
+ DT.TAB_VALUE_SPLIT,
2)
NewValueList = []
NewValueList.append(ValueList[0])
for Item in TokenList:
NewValueList.append(Item)
- UiBinaryList.append((NewValueList,
- LineComment,
- CurrentLineObj))
- #
+ UiBinaryList.append((NewValueList,
+ LineComment,
+ CurrentLineObj))
+ #
# Should equal to VER/SEC_VER/UNI_VER
#
elif ValueList[0] == DT.BINARY_FILE_TYPE_UNI_VER or \
ValueList[0] == DT.BINARY_FILE_TYPE_SEC_VER or \
ValueList[0] == DT.BINARY_FILE_TYPE_VER:
if len(ValueList) == 2:
- TokenList = GetSplitValueList(ValueList[1],
- DT.TAB_VALUE_SPLIT,
+ TokenList = GetSplitValueList(ValueList[1],
+ DT.TAB_VALUE_SPLIT,
2)
NewValueList = []
NewValueList.append(ValueList[0])
for Item in TokenList:
- NewValueList.append(Item)
- VerBinaryList.append((NewValueList,
- LineComment,
+ NewValueList.append(Item)
+ VerBinaryList.append((NewValueList,
+ LineComment,
CurrentLineObj))
else:
if len(ValueList) == 2:
if ValueList[0].strip() == 'SUBTYPE_GUID':
- TokenList = GetSplitValueList(ValueList[1],
- DT.TAB_VALUE_SPLIT,
+ TokenList = GetSplitValueList(ValueList[1],
+ DT.TAB_VALUE_SPLIT,
5)
else:
- TokenList = GetSplitValueList(ValueList[1],
- DT.TAB_VALUE_SPLIT,
+ TokenList = GetSplitValueList(ValueList[1],
+ DT.TAB_VALUE_SPLIT,
4)
-
+
NewValueList = []
NewValueList.append(ValueList[0])
for Item in TokenList:
- NewValueList.append(Item)
- ComBinaryList.append((NewValueList,
- LineComment,
+ NewValueList.append(Item)
+ ComBinaryList.append((NewValueList,
+ LineComment,
CurrentLineObj))
elif len(ValueList) == 1:
NewValueList = []
NewValueList.append(ValueList[0])
- ComBinaryList.append((NewValueList,
- LineComment,
+ ComBinaryList.append((NewValueList,
+ LineComment,
CurrentLineObj))
-
-
-
-
+
+
+
+
ValueList = []
LineComment = None
TailComments = ''
- HeaderComments = []
+ HeaderComments = []
continue
#
# Current section archs
- #
+ #
ArchList = []
for Item in self.LastSectionHeaderContent:
if Item[1] not in ArchList:
- ArchList.append(Item[1])
+ ArchList.append(Item[1])
InfSectionObject.SetSupArchList(Item[1])
-
- InfSectionObject.SetAllContent(AllSectionContent)
- if not InfSectionObject.SetBinary(UiBinaryList,
- VerBinaryList,
- ComBinaryList,
+
+ InfSectionObject.SetAllContent(AllSectionContent)
+ if not InfSectionObject.SetBinary(UiBinaryList,
+ VerBinaryList,
+ ComBinaryList,
ArchList):
- Logger.Error('InfParser',
+ Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR%("[Binaries]"),
File=FileName,
- Line=Item[3])
-
\ No newline at end of file
+ Line=Item[3])
+
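
The hunks in this file are whitespace-only: trailing spaces are stripped and the file gains a proper final newline (the old last line carried the "\ No newline at end of file" marker). A cleanup of this kind can be scripted; the sketch below is illustrative only, not part of this patch, and its tab width and CRLF choice are assumptions:

# CleanWhitespace.py: illustrative sketch, not part of this commit.
# Assumptions: files are plain text, tabs expand to four spaces,
# and lines should end with CRLF.
import sys

def CleanFile(Path):
    with open(Path, 'rb') as InputFile:
        Lines = InputFile.read().splitlines()
    Cleaned = []
    for Line in Lines:
        # Replace tab characters and drop trailing white space on every line
        Cleaned.append(Line.replace(b'\t', b'    ').rstrip())
    # Re-join with CRLF and guarantee exactly one newline at end of file
    with open(Path, 'wb') as OutputFile:
        OutputFile.write(b'\r\n'.join(Cleaned) + b'\r\n')

if __name__ == '__main__':
    for FileName in sys.argv[1:]:
        CleanFile(FileName)
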
diff --git a/BaseTools/Source/Python/UPT/Parser/InfBuildOptionSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfBuildOptionSectionParser.py
index 941641a845..43b6ee2e7f 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfBuildOptionSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfBuildOptionSectionParser.py
@@ -1,11 +1,11 @@
## @file
-# This file contained the parser for BuildOption sections in INF file
+# This file contained the parser for BuildOption sections in INF file
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -36,10 +36,10 @@ class InfBuildOptionSectionParser(InfParserSectionRoot):
#
#
def InfBuildOptionParser(self, SectionString, InfSectionObject, FileName):
-
+
BuildOptionList = []
SectionContent = ''
-
+
if not GlobalData.gIS_BINARY_INF:
ValueList = []
LineNo = 0
@@ -49,24 +49,24 @@ class InfBuildOptionSectionParser(InfParserSectionRoot):
LineNo = Line[1]
TailComments = ''
ReplaceFlag = False
-
+
if LineContent.strip() == '':
- SectionContent += LineContent + DT.END_OF_LINE
- continue
+ SectionContent += LineContent + DT.END_OF_LINE
+ continue
#
# Found Comment
#
if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
- SectionContent += LineContent + DT.END_OF_LINE
+ SectionContent += LineContent + DT.END_OF_LINE
continue
-
+
#
# Find Tail comment.
#
if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):]
- LineContent = LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
-
+ LineContent = LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
+
TokenList = GetSplitValueList(LineContent, DT.TAB_DEQUAL_SPLIT, 1)
if len(TokenList) == 2:
#
@@ -82,21 +82,21 @@ class InfBuildOptionSectionParser(InfParserSectionRoot):
if len(TokenList) == 2:
TokenList.append('False')
else:
- Logger.Error('InfParser',
+ Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID,
- ExtraData=LineContent,
+ ExtraData=LineContent,
File=FileName,
- Line=LineNo)
-
+ Line=LineNo)
+
ValueList[0:len(TokenList)] = TokenList
-
+
#
# Replace with [Defines] section Macro
#
- ValueList[0] = InfExpandMacro(ValueList[0], (FileName, LineContent, LineNo),
+ ValueList[0] = InfExpandMacro(ValueList[0], (FileName, LineContent, LineNo),
self.FileLocalMacros, None)
- ValueList[1] = InfExpandMacro(ValueList[1], (FileName, LineContent, LineNo),
+ ValueList[1] = InfExpandMacro(ValueList[1], (FileName, LineContent, LineNo),
self.FileLocalMacros, None, True)
EqualString = ''
if not ReplaceFlag:
@@ -108,30 +108,30 @@ class InfBuildOptionSectionParser(InfParserSectionRoot):
Family = GetSplitValueList(ValueList[0], DT.TAB_COLON_SPLIT, 1)
if len(Family) == 2:
- if not IsValidFamily(Family[0]):
- Logger.Error('InfParser',
+ if not IsValidFamily(Family[0]):
+ Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID,
- ExtraData=LineContent,
+ ExtraData=LineContent,
File=FileName,
Line=LineNo)
if not IsValidBuildOptionName(Family[1]):
- Logger.Error('InfParser',
+ Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID,
- ExtraData=LineContent,
+ ExtraData=LineContent,
File=FileName,
Line=LineNo)
if len(Family) == 1:
if not IsValidBuildOptionName(Family[0]):
- Logger.Error('InfParser',
+ Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID,
- ExtraData=LineContent,
+ ExtraData=LineContent,
File=FileName,
Line=LineNo)
-
- BuildOptionList.append(ValueList)
+
+ BuildOptionList.append(ValueList)
ValueList = []
continue
else:
@@ -147,20 +147,20 @@ class InfBuildOptionSectionParser(InfParserSectionRoot):
if not (Item[1] == '' or Item[1] == '') and Item[1] not in ArchList:
ArchList.append(Item[1])
InfSectionObject.SetSupArchList(Item[1])
-
+
InfSectionObject.SetAllContent(SectionContent)
if not InfSectionObject.SetBuildOptions(BuildOptionList, ArchList, SectionContent):
- Logger.Error('InfParser',
+ Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR%("[BuilOptions]"),
File=FileName,
- Line=LastItem[3])
+ Line=LastItem[3])
## InfBuildOptionParser
#
#
def InfAsBuiltBuildOptionParser(SectionString, FileName):
- BuildOptionList = []
+ BuildOptionList = []
#
# AsBuild Binary INF file.
#
@@ -171,7 +171,7 @@ def InfAsBuiltBuildOptionParser(SectionString, FileName):
Count += 1
LineContent = Line[0]
LineNo = Line[1]
-
+
#
# The last line
#
@@ -186,33 +186,33 @@ def InfAsBuiltBuildOptionParser(SectionString, FileName):
else:
if len(BuildOptionItem) > 0:
BuildOptionList.append(BuildOptionItem)
-
- break
-
+
+ break
+
if LineContent.strip() == '':
AsBuildOptionFlag = False
continue
-
+
if LineContent.strip().startswith("##") and AsBuildOptionFlag:
if len(BuildOptionItem) > 0:
- BuildOptionList.append(BuildOptionItem)
-
+ BuildOptionList.append(BuildOptionItem)
+
BuildOptionItem = []
-
+
if not LineContent.strip().startswith("#"):
- Logger.Error('InfParser',
+ Logger.Error('InfParser',
FORMAT_INVALID,
- ST.ERR_BO_CONTATIN_ASBUILD_AND_COMMON,
- File=FileName,
- Line=LineNo,
+ ST.ERR_BO_CONTATIN_ASBUILD_AND_COMMON,
+ File=FileName,
+ Line=LineNo,
ExtraData=LineContent)
-
+
if IsAsBuildOptionInfo(LineContent):
AsBuildOptionFlag = True
continue
-
+
if AsBuildOptionFlag:
BuildOptionInfo = GetHelpStringByRemoveHashKey(LineContent)
BuildOptionItem.append(BuildOptionInfo)
-
-    return BuildOptionList
\ No newline at end of file
+
+ return BuildOptionList
diff --git a/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py
index f7749d55a0..397a9b18f9 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py
@@ -1,11 +1,11 @@
## @file
-# This file contained the parser for define sections in INF file
+# This file contained the parser for define sections in INF file
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -33,32 +33,32 @@ from Logger import StringTable as ST
from Parser.InfParserMisc import InfParserSectionRoot
## __GetValidateArchList
-#
+#
#
def GetValidateArchList(LineContent):
-
+
TempArch = ''
ArchList = []
ValidateAcrhPatten = re.compile(r"^\s*#\s*VALID_ARCHITECTURES\s*=\s*.*$", re.DOTALL)
-
+
if ValidateAcrhPatten.match(LineContent):
TempArch = GetSplitValueList(LineContent, DT.TAB_EQUAL_SPLIT, 1)[1]
-
+
TempArch = GetSplitValueList(TempArch, '(', 1)[0]
-
+
ArchList = re.split('\s+', TempArch)
NewArchList = []
for Arch in ArchList:
if IsValidArch(Arch):
NewArchList.append(Arch)
-
+
ArchList = NewArchList
-
- return ArchList
+
+ return ArchList
class InfDefinSectionParser(InfParserSectionRoot):
def InfDefineParser(self, SectionString, InfSectionObject, FileName, SectionComment):
-
+
if SectionComment:
pass
#
@@ -74,18 +74,18 @@ class InfDefinSectionParser(InfParserSectionRoot):
# Add WORKSPACE to global Marco dict.
#
self.FileLocalMacros['WORKSPACE'] = GlobalData.gWORKSPACE
-
+
for Line in SectionString:
LineContent = Line[0]
LineNo = Line[1]
TailComments = ''
LineComment = None
-
+
LineInfo = ['', -1, '']
LineInfo[0] = FileName
LineInfo[1] = LineNo
LineInfo[2] = LineContent
-
+
if LineContent.strip() == '':
continue
#
@@ -106,7 +106,7 @@ class InfDefinSectionParser(InfParserSectionRoot):
SectionContent += LineContent + DT.END_OF_LINE
continue
#
- # First time encounter comment
+ # First time encounter comment
#
else:
#
@@ -119,14 +119,14 @@ class InfDefinSectionParser(InfParserSectionRoot):
continue
else:
StillCommentFalg = False
-
+
if len(HeaderComments) >= 1:
LineComment = InfLineCommentObject()
LineCommentContent = ''
for Item in HeaderComments:
LineCommentContent += Item[0] + DT.END_OF_LINE
LineComment.SetHeaderComments(LineCommentContent)
-
+
#
# Find Tail comment.
#
@@ -136,62 +136,62 @@ class InfDefinSectionParser(InfParserSectionRoot):
if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
-
+
#
# Find Macro
#
- Name, Value = MacroParser((LineContent, LineNo),
- FileName,
- DT.MODEL_META_DATA_HEADER,
+ Name, Value = MacroParser((LineContent, LineNo),
+ FileName,
+ DT.MODEL_META_DATA_HEADER,
self.FileLocalMacros)
if Name is not None:
self.FileLocalMacros[Name] = Value
- continue
+ continue
#
# Replace with [Defines] section Macro
#
- LineContent = InfExpandMacro(LineContent,
- (FileName, LineContent, LineNo),
- self.FileLocalMacros,
+ LineContent = InfExpandMacro(LineContent,
+ (FileName, LineContent, LineNo),
+ self.FileLocalMacros,
None, True)
-
+
SectionContent += LineContent + DT.END_OF_LINE
-
+
TokenList = GetSplitValueList(LineContent, DT.TAB_EQUAL_SPLIT, 1)
if len(TokenList) < 2:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_NO_VALUE,
- LineInfo=LineInfo)
+ LineInfo=LineInfo)
_ValueList[0:len(TokenList)] = TokenList
if not _ValueList[0]:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_NO_NAME,
LineInfo=LineInfo)
if not _ValueList[1]:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_NO_VALUE,
- LineInfo=LineInfo)
-
- Name, Value = _ValueList[0], _ValueList[1]
-
+ LineInfo=LineInfo)
+
+ Name, Value = _ValueList[0], _ValueList[1]
+
InfDefMemberObj = InfDefMember(Name, Value)
if (LineComment is not None):
InfDefMemberObj.Comments.SetHeaderComments(LineComment.GetHeaderComments())
InfDefMemberObj.Comments.SetTailComments(LineComment.GetTailComments())
-
+
InfDefMemberObj.CurrentLine.SetFileName(self.FullPath)
InfDefMemberObj.CurrentLine.SetLineString(LineContent)
InfDefMemberObj.CurrentLine.SetLineNo(LineNo)
-
+
_ContentList.append(InfDefMemberObj)
HeaderComments = []
TailComments = ''
-
+
#
# Current Define section archs
#
if not ArchList:
ArchList = ['COMMON']
-
- InfSectionObject.SetAllContent(SectionContent)
-
+
+ InfSectionObject.SetAllContent(SectionContent)
+
InfSectionObject.SetDefines(_ContentList, Arch=ArchList)
-
\ No newline at end of file
+
diff --git a/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py
index 332e2f0143..7e2114313e 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py
@@ -1,11 +1,11 @@
## @file
-# This file contained the parser for [Depex] sections in INF file
+# This file contained the parser for [Depex] sections in INF file
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -29,7 +29,7 @@ from Parser.InfParserMisc import InfParserSectionRoot
class InfDepexSectionParser(InfParserSectionRoot):
## InfDepexParser
#
- # For now, only separate Depex String and comments.
+ # For now, only separate Depex String and comments.
# Have two types of section header.
# 1. [Depex.Arch.ModuleType, ...]
# 2. [Depex.Arch|FFE, ...]
@@ -44,7 +44,7 @@ class InfDepexSectionParser(InfParserSectionRoot):
for Line in SectionString:
LineContent = Line[0]
LineNo = Line[1]
-
+
#
# Found comment
#
@@ -54,34 +54,34 @@ class InfDepexSectionParser(InfParserSectionRoot):
#
# Replace with [Defines] section Macro
#
- LineContent = InfExpandMacro(LineContent,
- (FileName, LineContent, Line[1]),
- self.FileLocalMacros,
+ LineContent = InfExpandMacro(LineContent,
+ (FileName, LineContent, Line[1]),
+ self.FileLocalMacros,
None, True)
-
+
CommentCount = LineContent.find(DT.TAB_COMMENT_SPLIT)
-
+
if CommentCount > -1:
- DepexComment.append((LineContent[CommentCount:], LineNo))
+ DepexComment.append((LineContent[CommentCount:], LineNo))
LineContent = LineContent[:CommentCount-1]
-
-
+
+
CommentCount = -1
DepexContent.append((LineContent, LineNo))
-
+
TokenList = GetSplitValueList(LineContent, DT.TAB_COMMENT_SPLIT)
ValueList[0:len(TokenList)] = TokenList
-
+
#
# Current section archs
- #
+ #
KeyList = []
LastItem = ''
for Item in self.LastSectionHeaderContent:
LastItem = Item
if (Item[1], Item[2], Item[3]) not in KeyList:
- KeyList.append((Item[1], Item[2], Item[3]))
-
+ KeyList.append((Item[1], Item[2], Item[3]))
+
NewCommentList = []
FormatCommentLn = -1
ReFormatComment = re.compile(r"""#(?:\s*)\[(.*?)\](?:.*)""", re.DOTALL)
@@ -90,15 +90,15 @@ class InfDepexSectionParser(InfParserSectionRoot):
if ReFormatComment.match(CommentContent) is not None:
FormatCommentLn = CommentItem[1] + 1
continue
-
+
if CommentItem[1] != FormatCommentLn:
NewCommentList.append(CommentContent)
else:
FormatCommentLn = CommentItem[1] + 1
-
+
if not InfSectionObject.SetDepex(DepexContent, KeyList = KeyList, CommentList = NewCommentList):
- Logger.Error('InfParser',
+ Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR%("[Depex]"),
- File=FileName,
-                      Line=LastItem[3])
\ No newline at end of file
+ File=FileName,
+ Line=LastItem[3])
diff --git a/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py
index 956c116c6e..75396dd7eb 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py
@@ -1,11 +1,11 @@
## @file
-# This file contained the parser for [Guids], [Ppis], [Protocols] sections in INF file
+# This file contained the parser for [Guids], [Ppis], [Protocols] sections in INF file
#
-# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -37,7 +37,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
#
def InfGuidParser(self, SectionString, InfSectionObject, FileName):
#
- # Macro defined in this section
+ # Macro defined in this section
#
SectionMacros = {}
ValueList = []
@@ -88,7 +88,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
#
# Replace with Local section Macro and [Defines] section Macro.
- #
+ #
ValueList = [InfExpandMacro(Value, (FileName, LineContent, LineNo),
self.FileLocalMacros, SectionMacros, True)
for Value in ValueList]
@@ -104,7 +104,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
#
# Current section archs
- #
+ #
ArchList = []
LineIndex = -1
for Item in self.LastSectionHeaderContent:
@@ -124,7 +124,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
#
def InfPpiParser(self, SectionString, InfSectionObject, FileName):
#
- # Macro defined in this section
+ # Macro defined in this section
#
SectionMacros = {}
ValueList = []
@@ -175,7 +175,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
#
# Replace with Local section Macro and [Defines] section Macro.
- #
+ #
ValueList = [InfExpandMacro(Value, (FileName, LineContent, LineNo), self.FileLocalMacros, SectionMacros)
for Value in ValueList]
@@ -189,7 +189,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
#
# Current section archs
- #
+ #
ArchList = []
LineIndex = -1
for Item in self.LastSectionHeaderContent:
@@ -206,7 +206,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
## InfUserExtensionParser
#
- #
+ #
def InfUserExtensionParser(self, SectionString, InfSectionObject, FileName):
UserExtensionContent = ''
@@ -226,7 +226,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
#
# Current section UserId, IdString
- #
+ #
IdContentList = []
LastItem = ''
SectionLineNo = None
@@ -265,14 +265,14 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
IdContentList.append((UserId, IdString, Arch))
else:
#
- # Each UserExtensions section header must have a unique set
+ # Each UserExtensions section header must have a unique set
# of UserId, IdString and Arch values.
- # This means that the same UserId can be used in more than one
- # section header, provided the IdString or Arch values are
- # different. The same IdString values can be used in more than
- # one section header if the UserId or Arch values are
- # different. The same UserId and the same IdString can be used
- # in a section header if the Arch values are different in each
+ # This means that the same UserId can be used in more than one
+ # section header, provided the IdString or Arch values are
+ # different. The same IdString values can be used in more than
+ # one section header if the UserId or Arch values are
+ # different. The same UserId and the same IdString can be used
+ # in a section header if the Arch values are different in each
# of the section headers.
#
Logger.Error('InfParser',
@@ -294,7 +294,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
def InfProtocolParser(self, SectionString, InfSectionObject, FileName):
#
- # Macro defined in this section
+ # Macro defined in this section
#
SectionMacros = {}
ValueList = []
@@ -345,7 +345,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
#
# Replace with Local section Macro and [Defines] section Macro.
- #
+ #
ValueList = [InfExpandMacro(Value, (FileName, LineContent, LineNo), self.FileLocalMacros, SectionMacros)
for Value in ValueList]
@@ -359,7 +359,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
#
# Current section archs
- #
+ #
ArchList = []
LineIndex = -1
for Item in self.LastSectionHeaderContent:
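
The comment block in the InfUserExtensionParser hunk above spells out the uniqueness rule for [UserExtensions] headers: the (UserId, IdString, Arch) triple must be unique across section headers, while any two of the three values may repeat. A toy illustration of that rule follows; the header values are invented for the example and are not taken from this patch:

# Illustration of the (UserId, IdString, Arch) uniqueness rule; the
# header values below are made up for the example.
Seen = set()
Headers = [('TianoCore', '"ExtraFiles"', 'IA32'),
           ('TianoCore', '"ExtraFiles"', 'X64'),    # allowed: Arch differs
           ('TianoCore', '"ExtraFiles"', 'IA32')]   # rejected: exact repeat
for UserId, IdString, Arch in Headers:
    Key = (UserId, IdString, Arch)
    if Key in Seen:
        print('Duplicate UserExtensions header: %s' % (Key,))
    Seen.add(Key)
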
diff --git a/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py
index 549e67f08d..1a7014436f 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py
@@ -1,11 +1,11 @@
## @file
-# This file contained the parser for [Libraries] sections in INF file
+# This file contained the parser for [Libraries] sections in INF file
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -34,14 +34,14 @@ from Parser.InfParserMisc import InfParserSectionRoot
class InfLibrarySectionParser(InfParserSectionRoot):
## InfLibraryParser
#
- #
+ #
def InfLibraryParser(self, SectionString, InfSectionObject, FileName):
#
# For Common INF file
#
if not GlobalData.gIS_BINARY_INF:
#
- # Macro defined in this section
+ # Macro defined in this section
#
SectionMacros = {}
ValueList = []
@@ -60,7 +60,7 @@ class InfLibrarySectionParser(InfParserSectionRoot):
continue
#
- # Found Header Comments
+ # Found Header Comments
#
if LibLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
#
@@ -70,7 +70,7 @@ class InfLibrarySectionParser(InfParserSectionRoot):
LibHeaderComments.append(Line)
continue
#
- # First time encounter comment
+ # First time encounter comment
#
else:
#
@@ -118,7 +118,7 @@ class InfLibrarySectionParser(InfParserSectionRoot):
#
# Replace with Local section Macro and [Defines] section Macro.
- #
+ #
ValueList = [InfExpandMacro(Value, (FileName, LibLineContent, LibLineNo),
self.FileLocalMacros, SectionMacros, True)
for Value in ValueList]
@@ -134,7 +134,7 @@ class InfLibrarySectionParser(InfParserSectionRoot):
#
# Current section archs
- #
+ #
KeyList = []
for Item in self.LastSectionHeaderContent:
if (Item[1], Item[2]) not in KeyList:
@@ -188,7 +188,7 @@ class InfLibrarySectionParser(InfParserSectionRoot):
#
# Current section archs
- #
+ #
KeyList = []
Item = ['', '', '']
for Item in self.LastSectionHeaderContent:
diff --git a/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py
index 8fb2898826..0ddd4ef0fc 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py
@@ -1,11 +1,11 @@
## @file
-# This file contained the parser for [Packages] sections in INF file
+# This file contained the parser for [Packages] sections in INF file
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -31,29 +31,29 @@ from Parser.InfParserMisc import InfParserSectionRoot
class InfPackageSectionParser(InfParserSectionRoot):
## InfPackageParser
#
- #
+ #
def InfPackageParser(self, SectionString, InfSectionObject, FileName):
#
- # Macro defined in this section
+ # Macro defined in this section
#
SectionMacros = {}
ValueList = []
PackageList = []
StillCommentFalg = False
HeaderComments = []
- LineComment = None
+ LineComment = None
#
# Parse section content
#
for Line in SectionString:
PkgLineContent = Line[0]
- PkgLineNo = Line[1]
-
+ PkgLineNo = Line[1]
+
if PkgLineContent.strip() == '':
continue
-
+
#
- # Find Header Comments
+ # Find Header Comments
#
if PkgLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
#
@@ -63,7 +63,7 @@ class InfPackageSectionParser(InfParserSectionRoot):
HeaderComments.append(Line)
continue
#
- # First time encounter comment
+ # First time encounter comment
#
else:
#
@@ -75,14 +75,14 @@ class InfPackageSectionParser(InfParserSectionRoot):
continue
else:
StillCommentFalg = False
-
+
if len(HeaderComments) >= 1:
LineComment = InfLineCommentObject()
LineCommentContent = ''
for Item in HeaderComments:
LineCommentContent += Item[0] + DT.END_OF_LINE
LineComment.SetHeaderComments(LineCommentContent)
-
+
#
# Find Tail comment.
#
@@ -91,7 +91,7 @@ class InfPackageSectionParser(InfParserSectionRoot):
PkgLineContent = PkgLineContent[:PkgLineContent.find(DT.TAB_COMMENT_SPLIT)]
if LineComment is None:
LineComment = InfLineCommentObject()
- LineComment.SetTailComments(TailComments)
+ LineComment.SetTailComments(TailComments)
#
# Find Macro
#
@@ -102,39 +102,39 @@ class InfPackageSectionParser(InfParserSectionRoot):
if Name is not None:
SectionMacros[Name] = Value
LineComment = None
- HeaderComments = []
+ HeaderComments = []
continue
TokenList = GetSplitValueList(PkgLineContent, DT.TAB_VALUE_SPLIT, 1)
ValueList[0:len(TokenList)] = TokenList
-
+
#
# Replace with Local section Macro and [Defines] section Macro.
- #
- ValueList = [InfExpandMacro(Value, (FileName, PkgLineContent, PkgLineNo),
+ #
+ ValueList = [InfExpandMacro(Value, (FileName, PkgLineContent, PkgLineNo),
self.FileLocalMacros, SectionMacros, True)
for Value in ValueList]
-
- PackageList.append((ValueList, LineComment,
+
+ PackageList.append((ValueList, LineComment,
(PkgLineContent, PkgLineNo, FileName)))
ValueList = []
LineComment = None
TailComments = ''
- HeaderComments = []
+ HeaderComments = []
continue
#
# Current section archs
- #
+ #
ArchList = []
for Item in self.LastSectionHeaderContent:
if Item[1] not in ArchList:
- ArchList.append(Item[1])
-
+ ArchList.append(Item[1])
+
if not InfSectionObject.SetPackages(PackageList, Arch = ArchList):
- Logger.Error('InfParser',
- FORMAT_INVALID,
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR\
%("[Packages]"),
File=FileName,
-                         Line=Item[3])
\ No newline at end of file
+ Line=Item[3])
diff --git a/BaseTools/Source/Python/UPT/Parser/InfParser.py b/BaseTools/Source/Python/UPT/Parser/InfParser.py
index e6048a1d1c..cd99262e03 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfParser.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -49,24 +49,24 @@ from Parser.InfParserMisc import IsBinaryInf
#
def OpenInfFile(Filename):
FileLinesList = []
-
+
try:
FInputfile = open(Filename, "rb", 0)
try:
FileLinesList = FInputfile.readlines()
except BaseException:
- Logger.Error("InfParser",
- FILE_READ_FAILURE,
+ Logger.Error("InfParser",
+ FILE_READ_FAILURE,
ST.ERR_FILE_OPEN_FAILURE,
File=Filename)
finally:
FInputfile.close()
except BaseException:
- Logger.Error("InfParser",
- FILE_READ_FAILURE,
+ Logger.Error("InfParser",
+ FILE_READ_FAILURE,
ST.ERR_FILE_OPEN_FAILURE,
File=Filename)
-
+
return FileLinesList
## InfParser
@@ -74,20 +74,20 @@ def OpenInfFile(Filename):
# This class defined the structure used in InfParser object
#
# @param InfObject: Inherited from InfSectionParser class
-# @param Filename: Input value for Filename of INF file, default is
+# @param Filename: Input value for Filename of INF file, default is
# None
-# @param WorkspaceDir: Input value for current workspace directory,
+# @param WorkspaceDir: Input value for current workspace directory,
# default is None
#
class InfParser(InfSectionParser):
def __init__(self, Filename = None, WorkspaceDir = None):
-
+
#
# Call parent class construct function
#
super(InfParser, self).__init__()
-
+
self.WorkspaceDir = WorkspaceDir
self.SupArchList = DT.ARCH_LIST
self.EventList = []
@@ -107,7 +107,7 @@ class InfParser(InfSectionParser):
# @param Filename: Input value for filename of INF file
#
def ParseInfFile(self, Filename):
-
+
Filename = NormPath(Filename)
(Path, Name) = os.path.split(Filename)
self.FullPath = Filename
@@ -120,30 +120,30 @@ class InfParser(InfSectionParser):
# Initialize common data
#
LineNo = 0
- CurrentSection = DT.MODEL_UNKNOWN
+ CurrentSection = DT.MODEL_UNKNOWN
SectionLines = []
-
+
#
# Flags
#
- HeaderCommentStart = False
+ HeaderCommentStart = False
HeaderCommentEnd = False
HeaderStarLineNo = -1
- BinaryHeaderCommentStart = False
+ BinaryHeaderCommentStart = False
BinaryHeaderCommentEnd = False
BinaryHeaderStarLineNo = -1
-
+
#
# While Section ends. parse whole section contents.
#
NewSectionStartFlag = False
FirstSectionStartFlag = False
-
+
#
# Parse file content
#
CommentBlock = []
-
+
#
# Variables for Event/Hob/BootMode
#
@@ -151,44 +151,44 @@ class InfParser(InfSectionParser):
self.HobList = []
self.BootModeList = []
SectionType = ''
-
+
FileLinesList = OpenInfFile (Filename)
-
+
#
# One INF file can only has one [Defines] section.
#
DefineSectionParsedFlag = False
-
+
#
# Convert special characters in lines to space character.
#
FileLinesList = ConvertSpecialChar(FileLinesList)
-
+
#
# Process Line Extender
#
FileLinesList = ProcessLineExtender(FileLinesList)
-
+
#
# Process EdkI INF style comment if found
#
OrigLines = [Line for Line in FileLinesList]
FileLinesList, EdkCommentStartPos = ProcessEdkComment(FileLinesList)
-
+
#
# Judge whether the INF file is Binary INF or not
#
if IsBinaryInf(FileLinesList):
GlobalData.gIS_BINARY_INF = True
-
+
InfSectionCommonDefObj = None
-
+
for Line in FileLinesList:
LineNo = LineNo + 1
Line = Line.strip()
if (LineNo < len(FileLinesList) - 1):
NextLine = FileLinesList[LineNo].strip()
-
+
#
# blank line
#
@@ -209,7 +209,7 @@ class InfParser(InfSectionParser):
HeaderStarLineNo = LineNo
SectionLines.append((Line, LineNo))
HeaderCommentStart = True
- continue
+ continue
#
# Collect Header content.
@@ -225,11 +225,11 @@ class InfParser(InfSectionParser):
if (Line.startswith(DT.TAB_SPECIAL_COMMENT) or not Line.strip().startswith("#")) and HeaderCommentStart \
and not HeaderCommentEnd:
HeaderCommentEnd = True
- BinaryHeaderCommentStart = False
+ BinaryHeaderCommentStart = False
BinaryHeaderCommentEnd = False
- HeaderCommentStart = False
+ HeaderCommentStart = False
if Line.find(DT.TAB_BINARY_HEADER_COMMENT) > -1:
- self.InfHeaderParser(SectionLines, self.InfHeader, self.FileName)
+ self.InfHeaderParser(SectionLines, self.InfHeader, self.FileName)
SectionLines = []
else:
SectionLines.append((Line, LineNo))
@@ -255,8 +255,8 @@ class InfParser(InfSectionParser):
SectionLines.append((Line, LineNo))
BinaryHeaderCommentStart = True
HeaderCommentEnd = True
- continue
-
+ continue
+
#
# check whether there are more than one binary header exist
#
@@ -266,7 +266,7 @@ class InfParser(InfSectionParser):
FORMAT_INVALID,
ST.ERR_MULTIPLE_BINARYHEADER_EXIST,
File=Filename)
-
+
#
# Collect Binary Header content.
#
@@ -287,17 +287,17 @@ class InfParser(InfSectionParser):
#
self.InfHeaderParser(SectionLines, self.InfBinaryHeader, self.FileName, True)
SectionLines = []
- BinaryHeaderCommentEnd = True
- continue
+ BinaryHeaderCommentEnd = True
+ continue
#
# Find a new section tab
- # Or at the last line of INF file,
+ # Or at the last line of INF file,
# need to process the last section.
#
LastSectionFalg = False
if LineNo == len(FileLinesList):
LastSectionFalg = True
-
+
if Line.startswith(DT.TAB_COMMENT_SPLIT) and not Line.startswith(DT.TAB_SPECIAL_COMMENT):
SectionLines.append((Line, LineNo))
if not LastSectionFalg:
@@ -307,23 +307,23 @@ class InfParser(InfSectionParser):
# Encountered a section. start with '[' and end with ']'
#
if (Line.startswith(DT.TAB_SECTION_START) and \
- Line.find(DT.TAB_SECTION_END) > -1) or LastSectionFalg:
-
- HeaderCommentEnd = True
- BinaryHeaderCommentEnd = True
-
+ Line.find(DT.TAB_SECTION_END) > -1) or LastSectionFalg:
+
+ HeaderCommentEnd = True
+ BinaryHeaderCommentEnd = True
+
if not LastSectionFalg:
#
# check to prevent '#' inside section header
#
HeaderContent = Line[1:Line.find(DT.TAB_SECTION_END)]
if HeaderContent.find(DT.TAB_COMMENT_SPLIT) != -1:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_SECTION_HEADER_INVALID,
File=self.FullPath,
- Line=LineNo,
- ExtraData=Line)
+ Line=LineNo,
+ ExtraData=Line)
#
# Keep last time section header content for section parser
@@ -339,7 +339,7 @@ class InfParser(InfSectionParser):
if CommentIndex > -1:
TailComments = Line[CommentIndex:]
Line = Line[:CommentIndex]
-
+
InfSectionCommonDefObj = InfSectionCommonDef()
if TailComments != '':
InfSectionCommonDefObj.SetTailComments(TailComments)
@@ -350,8 +350,8 @@ class InfParser(InfSectionParser):
# Call section parser before section header parer to avoid encounter EDKI INF file
#
if CurrentSection == DT.MODEL_META_DATA_DEFINE:
- DefineSectionParsedFlag = self._CallSectionParsers(CurrentSection,
- DefineSectionParsedFlag, SectionLines,
+ DefineSectionParsedFlag = self._CallSectionParsers(CurrentSection,
+ DefineSectionParsedFlag, SectionLines,
InfSectionCommonDefObj, LineNo)
#
# Compare the new section name with current
@@ -377,52 +377,52 @@ class InfParser(InfSectionParser):
#
# End of section content collect.
# Parser the section content collected previously.
- #
+ #
if NewSectionStartFlag or LastSectionFalg:
if CurrentSection != DT.MODEL_META_DATA_DEFINE or \
- (LastSectionFalg and CurrentSection == DT.MODEL_META_DATA_DEFINE):
- DefineSectionParsedFlag = self._CallSectionParsers(CurrentSection,
- DefineSectionParsedFlag, SectionLines,
+ (LastSectionFalg and CurrentSection == DT.MODEL_META_DATA_DEFINE):
+ DefineSectionParsedFlag = self._CallSectionParsers(CurrentSection,
+ DefineSectionParsedFlag, SectionLines,
InfSectionCommonDefObj, LineNo)
-
+
CurrentSection = SectionType
#
# Clear section lines
#
- SectionLines = []
-
+ SectionLines = []
+
if HeaderStarLineNo == -1:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_NO_SOURCE_HEADER,
- File=self.FullPath)
+ File=self.FullPath)
if BinaryHeaderStarLineNo > -1 and HeaderStarLineNo > -1 and HeaderStarLineNo > BinaryHeaderStarLineNo:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_BINARY_HEADER_ORDER,
- File=self.FullPath)
+ File=self.FullPath)
#
# EDKII INF should not have EDKI style comment
#
if EdkCommentStartPos != -1:
- Logger.Error("InfParser",
- FORMAT_INVALID,
- ST.ERR_INF_PARSER_EDKI_COMMENT_IN_EDKII,
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_EDKI_COMMENT_IN_EDKII,
File=self.FullPath,
Line=EdkCommentStartPos + 1,
ExtraData=OrigLines[EdkCommentStartPos])
-
+
+ #
+ # extract [Event] [Hob] [BootMode] sections
#
- # extract [Event] [Hob] [BootMode] sections
- #
self._ExtractEventHobBootMod(FileLinesList)
-
+
## _CheckSectionHeaders
#
#
def _CheckSectionHeaders(self, Line, LineNo):
if len(self.SectionHeaderContent) == 0:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_SECTION_HEADER_INVALID,
File=self.FullPath,
@@ -431,7 +431,7 @@ class InfParser(InfSectionParser):
for SectionItem in self.SectionHeaderContent:
ArchList = []
#
- # Not cover Depex/UserExtension section header
+ # Not cover Depex/UserExtension section header
# check.
#
if SectionItem[0].strip().upper() == DT.TAB_INF_FIXED_PCD.upper() or \
@@ -442,15 +442,15 @@ class InfParser(InfSectionParser):
ArchList = GetSplitValueList(SectionItem[1].strip(), ' ')
else:
ArchList = [SectionItem[1].strip()]
-
+
for Arch in ArchList:
if (not IsValidArch(Arch)) and \
(SectionItem[0].strip().upper() != DT.TAB_DEPEX.upper()) and \
(SectionItem[0].strip().upper() != DT.TAB_USER_EXTENSIONS.upper()) and \
(SectionItem[0].strip().upper() != DT.TAB_COMMON_DEFINES.upper()):
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
FORMAT_INVALID,
- ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(SectionItem[1]),
+ ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(SectionItem[1]),
File=self.FullPath,
Line=LineNo, ExtraData=Line)
#
@@ -467,11 +467,11 @@ class InfParser(InfSectionParser):
ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(SectionItem[2]),
File=self.FullPath, Line=LineNo,
ExtraData=Line)
-
+
## _CallSectionParsers
#
#
- def _CallSectionParsers(self, CurrentSection, DefineSectionParsedFlag,
+ def _CallSectionParsers(self, CurrentSection, DefineSectionParsedFlag,
SectionLines, InfSectionCommonDefObj, LineNo):
if CurrentSection == DT.MODEL_META_DATA_DEFINE:
if not DefineSectionParsedFlag:
@@ -481,22 +481,22 @@ class InfParser(InfSectionParser):
InfSectionCommonDefObj)
DefineSectionParsedFlag = True
else:
- Logger.Error("Parser",
- PARSER_ERROR,
- ST.ERR_INF_PARSER_MULTI_DEFINE_SECTION,
- File=self.FullPath,
+ Logger.Error("Parser",
+ PARSER_ERROR,
+ ST.ERR_INF_PARSER_MULTI_DEFINE_SECTION,
+ File=self.FullPath,
RaiseError = Logger.IS_RAISE_ERROR)
-
+
elif CurrentSection == DT.MODEL_META_DATA_BUILD_OPTION:
self.InfBuildOptionParser(SectionLines,
self.InfBuildOptionSection,
self.FullPath)
-
+
elif CurrentSection == DT.MODEL_EFI_LIBRARY_CLASS:
self.InfLibraryParser(SectionLines,
self.InfLibraryClassSection,
self.FullPath)
-
+
elif CurrentSection == DT.MODEL_META_DATA_PACKAGE:
self.InfPackageParser(SectionLines,
self.InfPackageSection,
@@ -512,37 +512,37 @@ class InfParser(InfSectionParser):
self.InfPcdParser(SectionLines,
self.InfPcdSection,
self.FullPath)
-
+
elif CurrentSection == DT.MODEL_EFI_SOURCE_FILE:
self.InfSourceParser(SectionLines,
self.InfSourcesSection,
self.FullPath)
-
+
elif CurrentSection == DT.MODEL_META_DATA_USER_EXTENSION:
self.InfUserExtensionParser(SectionLines,
self.InfUserExtensionSection,
self.FullPath)
-
+
elif CurrentSection == DT.MODEL_EFI_PROTOCOL:
self.InfProtocolParser(SectionLines,
self.InfProtocolSection,
self.FullPath)
-
+
elif CurrentSection == DT.MODEL_EFI_PPI:
self.InfPpiParser(SectionLines,
self.InfPpiSection,
self.FullPath)
-
+
elif CurrentSection == DT.MODEL_EFI_GUID:
self.InfGuidParser(SectionLines,
self.InfGuidSection,
self.FullPath)
-
+
elif CurrentSection == DT.MODEL_EFI_DEPEX:
self.InfDepexParser(SectionLines,
self.InfDepexSection,
self.FullPath)
-
+
elif CurrentSection == DT.MODEL_EFI_BINARY_FILE:
self.InfBinaryParser(SectionLines,
self.InfBinariesSection,
@@ -552,20 +552,20 @@ class InfParser(InfSectionParser):
#
else:
if len(self.SectionHeaderContent) >= 1:
- Logger.Error("Parser",
- PARSER_ERROR,
- ST.ERR_INF_PARSER_UNKNOWN_SECTION,
- File=self.FullPath, Line=LineNo,
+ Logger.Error("Parser",
+ PARSER_ERROR,
+ ST.ERR_INF_PARSER_UNKNOWN_SECTION,
+ File=self.FullPath, Line=LineNo,
RaiseError = Logger.IS_RAISE_ERROR)
else:
- Logger.Error("Parser",
- PARSER_ERROR,
- ST.ERR_INF_PARSER_NO_SECTION_ERROR,
- File=self.FullPath, Line=LineNo,
+ Logger.Error("Parser",
+ PARSER_ERROR,
+ ST.ERR_INF_PARSER_NO_SECTION_ERROR,
+ File=self.FullPath, Line=LineNo,
RaiseError = Logger.IS_RAISE_ERROR)
-
- return DefineSectionParsedFlag
-
+
+ return DefineSectionParsedFlag
+
def _ExtractEventHobBootMod(self, FileLinesList):
SpecialSectionStart = False
CheckLocation = False
@@ -607,43 +607,43 @@ class InfParser(InfSectionParser):
elif not Line:
SpecialSectionStart = False
CheckLocation = True
- Element = []
+ Element = []
else:
if not Line.startswith(DT.TAB_COMMENT_SPLIT):
- Logger.Warn("Parser",
- ST.WARN_SPECIAL_SECTION_LOCATION_WRONG,
+ Logger.Warn("Parser",
+ ST.WARN_SPECIAL_SECTION_LOCATION_WRONG,
File=self.FullPath, Line=LineNum)
SpecialSectionStart = False
CheckLocation = False
Element = []
else:
- Element.append([Line, LineNum])
+ Element.append([Line, LineNum])
else:
if CheckLocation:
if MatchObject:
CheckLocation = False
elif Line:
- Logger.Warn("Parser",
- ST.WARN_SPECIAL_SECTION_LOCATION_WRONG,
- File=self.FullPath, Line=LineNum)
+ Logger.Warn("Parser",
+ ST.WARN_SPECIAL_SECTION_LOCATION_WRONG,
+ File=self.FullPath, Line=LineNum)
CheckLocation = False
-
+
if len(self.BootModeList) >= 1:
- self.InfSpecialCommentParser(self.BootModeList,
- self.InfSpecialCommentSection,
- self.FileName,
+ self.InfSpecialCommentParser(self.BootModeList,
+ self.InfSpecialCommentSection,
+ self.FileName,
DT.TYPE_BOOTMODE_SECTION)
-
+
if len(self.EventList) >= 1:
- self.InfSpecialCommentParser(self.EventList,
+ self.InfSpecialCommentParser(self.EventList,
self.InfSpecialCommentSection,
- self.FileName,
+ self.FileName,
DT.TYPE_EVENT_SECTION)
-
+
if len(self.HobList) >= 1:
- self.InfSpecialCommentParser(self.HobList,
- self.InfSpecialCommentSection,
- self.FileName,
+ self.InfSpecialCommentParser(self.HobList,
+ self.InfSpecialCommentSection,
+ self.FileName,
DT.TYPE_HOB_SECTION)
## _ProcessLastSection
#
@@ -652,35 +652,35 @@ class InfParser(InfSectionParser):
#
# The last line is a section header. will discard it.
#
- if not (Line.startswith(DT.TAB_SECTION_START) and Line.find(DT.TAB_SECTION_END) > -1):
+ if not (Line.startswith(DT.TAB_SECTION_START) and Line.find(DT.TAB_SECTION_END) > -1):
SectionLines.append((Line, LineNo))
-
+
if len(self.SectionHeaderContent) >= 1:
TemSectionName = self.SectionHeaderContent[0][0].upper()
if TemSectionName.upper() not in gINF_SECTION_DEF.keys():
- Logger.Error("InfParser",
- FORMAT_INVALID,
- ST.ERR_INF_PARSER_UNKNOWN_SECTION,
- File=self.FullPath,
- Line=LineNo,
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_UNKNOWN_SECTION,
+ File=self.FullPath,
+ Line=LineNo,
ExtraData=Line,
RaiseError = Logger.IS_RAISE_ERROR
- )
+ )
else:
CurrentSection = gINF_SECTION_DEF[TemSectionName]
self.LastSectionHeaderContent = self.SectionHeaderContent
-
+
return SectionLines, CurrentSection
## _ConvertSecNameToType
#
#
-def _ConvertSecNameToType(SectionName):
+def _ConvertSecNameToType(SectionName):
SectionType = ''
if SectionName.upper() not in gINF_SECTION_DEF.keys():
- SectionType = DT.MODEL_UNKNOWN
+ SectionType = DT.MODEL_UNKNOWN
else:
- SectionType = gINF_SECTION_DEF[SectionName.upper()]
-
- return SectionType
-
+ SectionType = gINF_SECTION_DEF[SectionName.upper()]
+
+ return SectionType
+
diff --git a/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py b/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py
index df32225aff..78908fd984 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py
@@ -1,11 +1,11 @@
## @file
-# This file contained the miscellaneous functions for INF parser
+# This file contained the miscellaneous functions for INF parser
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -59,39 +59,39 @@ gINF_SECTION_DEF = {
#
# EDK1 section
# TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE
- #
+ #
}
## InfExpandMacro
#
-# Expand MACRO definition with MACROs defined in [Defines] section and specific section.
+# Expand MACRO definition with MACROs defined in [Defines] section and specific section.
# The MACROs defined in specific section has high priority and will be expanded firstly.
#
# @param LineInfo Contain information of FileName, LineContent, LineNo
# @param GlobalMacros MACROs defined in INF [Defines] section
# @param SectionMacros MACROs defined in INF specific section
-# @param Flag If the flag set to True, need to skip macros in a quoted string
+# @param Flag If the flag set to True, need to skip macros in a quoted string
#
def InfExpandMacro(Content, LineInfo, GlobalMacros=None, SectionMacros=None, Flag=False):
if GlobalMacros is None:
GlobalMacros = {}
if SectionMacros is None:
SectionMacros = {}
-
+
FileName = LineInfo[0]
LineContent = LineInfo[1]
LineNo = LineInfo[2]
-
+
# Don't expand macros in comments
if LineContent.strip().startswith("#"):
return Content
NewLineInfo = (FileName, LineNo, LineContent)
-
+
#
# First, replace MARCOs with value defined in specific section
#
- Content = ReplaceMacro (Content,
+ Content = ReplaceMacro (Content,
SectionMacros,
False,
(LineContent, LineNo),
@@ -100,13 +100,13 @@ def InfExpandMacro(Content, LineInfo, GlobalMacros=None, SectionMacros=None, Fla
#
# Then replace MARCOs with value defined in [Defines] section
#
- Content = ReplaceMacro (Content,
+ Content = ReplaceMacro (Content,
GlobalMacros,
False,
(LineContent, LineNo),
FileName,
Flag)
-
+
MacroUsed = gMACRO_PATTERN.findall(Content)
#
# no macro found in String, stop replacing
@@ -122,9 +122,9 @@ def InfExpandMacro(Content, LineInfo, GlobalMacros=None, SectionMacros=None, Fla
#
ErrorInInf (ERR_MARCO_DEFINITION_MISS_ERROR,
LineInfo=NewLineInfo)
-
+
return Content
-
+
## IsBinaryInf
#
@@ -135,25 +135,25 @@ def InfExpandMacro(Content, LineInfo, GlobalMacros=None, SectionMacros=None, Fla
def IsBinaryInf(FileLineList):
if not FileLineList:
return False
-
+
ReIsSourcesSection = re.compile("^\s*\[Sources.*\]\s.*$", re.IGNORECASE)
ReIsBinarySection = re.compile("^\s*\[Binaries.*\]\s.*$", re.IGNORECASE)
BinarySectionFoundFlag = False
-
+
for Line in FileLineList:
if ReIsSourcesSection.match(Line):
return False
if ReIsBinarySection.match(Line):
BinarySectionFoundFlag = True
-
+
if BinarySectionFoundFlag:
return True
-
+
return False
-
-
+
+
## IsLibInstanceInfo
-#
+#
# Judge whether the string contain the information of ## @LIB_INSTANCES.
#
# @param String
@@ -166,10 +166,10 @@ def IsLibInstanceInfo(String):
return True
else:
return False
-
-
+
+
## IsAsBuildOptionInfo
-#
+#
# Judge whether the string contain the information of ## @ASBUILD.
#
# @param String
@@ -181,8 +181,8 @@ def IsAsBuildOptionInfo(String):
if ReIsAsBuildInstance.match(String):
return True
else:
- return False
-
+ return False
+
class InfParserSectionRoot(object):
def __init__(self):
@@ -190,19 +190,19 @@ class InfParserSectionRoot(object):
# Macros defined in [Define] section are file scope global
#
self.FileLocalMacros = {}
-
+
#
- # Current Section Header content.
+ # Current Section Header content.
#
self.SectionHeaderContent = []
#
- # Last time Section Header content.
+ # Last time Section Header content.
#
- self.LastSectionHeaderContent = []
-
+ self.LastSectionHeaderContent = []
+
self.FullPath = ''
-
+
self.InfDefSection = None
self.InfBuildOptionSection = None
self.InfLibraryClassSection = None
@@ -219,4 +219,4 @@ class InfParserSectionRoot(object):
self.InfSmmDepexSection = None
self.InfBinariesSection = None
self.InfHeader = None
- self.InfSpecialCommentSection = None
+ self.InfSpecialCommentSection = None
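
The InfExpandMacro doc comment in the hunk above describes a two-pass expansion: section-scope macros are replaced first, then [Defines]-scope macros, so a section macro wins when both define the same name. A minimal stand-alone illustration of that ordering, simplified and with invented macro values:

# Simplified two-pass expansion mirroring the precedence described above;
# macro names and values are invented for the example.
SectionMacros = {'ARCH': 'X64'}
DefinesMacros = {'ARCH': 'IA32', 'OUTPUT': 'Build'}

def Expand(Content, Macros):
    for Name, Value in Macros.items():
        Content = Content.replace('$(%s)' % Name, Value)
    return Content

Line = '$(OUTPUT)/$(ARCH)/Module.efi'
Line = Expand(Line, SectionMacros)   # section macros first: ARCH -> X64
Line = Expand(Line, DefinesMacros)   # then [Defines] macros: OUTPUT -> Build
print(Line)                          # prints Build/X64/Module.efi
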
diff --git a/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py
index 13535a3738..b741616c3d 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py
@@ -1,11 +1,11 @@
## @file
-# This file contained the parser for [Pcds] sections in INF file
+# This file contained the parser for [Pcds] sections in INF file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -31,49 +31,49 @@ from Parser.InfParserMisc import InfParserSectionRoot
class InfPcdSectionParser(InfParserSectionRoot):
## Section PCD related parser
- #
+ #
# For 5 types of PCD list below, all use this function.
# 'FixedPcd', 'FeaturePcd', 'PatchPcd', 'Pcd', 'PcdEx'
#
- # This is a INF independent parser, the validation in this parser only
+ # This is a INF independent parser, the validation in this parser only
# cover
# INF spec scope, will not cross DEC/DSC to check pcd value
#
def InfPcdParser(self, SectionString, InfSectionObject, FileName):
KeysList = []
PcdList = []
- CommentsList = []
- ValueList = []
+ CommentsList = []
+ ValueList = []
#
# Current section archs
- #
+ #
LineIndex = -1
for Item in self.LastSectionHeaderContent:
if (Item[0], Item[1], Item[3]) not in KeysList:
KeysList.append((Item[0], Item[1], Item[3]))
LineIndex = Item[3]
-
+
if (Item[0].upper() == DT.TAB_INF_FIXED_PCD.upper() or \
Item[0].upper() == DT.TAB_INF_FEATURE_PCD.upper() or \
Item[0].upper() == DT.TAB_INF_PCD.upper()) and GlobalData.gIS_BINARY_INF:
Logger.Error('InfParser', FORMAT_INVALID, ST.ERR_ASBUILD_PCD_SECTION_TYPE%("\"" + Item[0] + "\""),
- File=FileName, Line=LineIndex)
-
+ File=FileName, Line=LineIndex)
+
#
# For Common INF file
#
- if not GlobalData.gIS_BINARY_INF:
+ if not GlobalData.gIS_BINARY_INF:
#
- # Macro defined in this section
+ # Macro defined in this section
#
- SectionMacros = {}
+ SectionMacros = {}
for Line in SectionString:
PcdLineContent = Line[0]
PcdLineNo = Line[1]
if PcdLineContent.strip() == '':
CommentsList = []
- continue
-
+ continue
+
if PcdLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
CommentsList.append(Line)
continue
@@ -83,10 +83,10 @@ class InfPcdSectionParser(InfParserSectionRoot):
#
if PcdLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
CommentsList.append((
- PcdLineContent[PcdLineContent.find(DT.TAB_COMMENT_SPLIT):],
+ PcdLineContent[PcdLineContent.find(DT.TAB_COMMENT_SPLIT):],
PcdLineNo))
- PcdLineContent = PcdLineContent[:PcdLineContent.find(DT.TAB_COMMENT_SPLIT)]
-
+ PcdLineContent = PcdLineContent[:PcdLineContent.find(DT.TAB_COMMENT_SPLIT)]
+
if PcdLineContent != '':
#
# Find Macro
@@ -100,23 +100,23 @@ class InfPcdSectionParser(InfParserSectionRoot):
ValueList = []
CommentsList = []
continue
-
+
PcdEntryReturn = SplitPcdEntry(PcdLineContent)
-
+
if not PcdEntryReturn[1]:
- TokenList = ['']
+ TokenList = ['']
else:
TokenList = PcdEntryReturn[0]
-
+
ValueList[0:len(TokenList)] = TokenList
-
+
#
# Replace with Local section Macro and [Defines] section Macro.
- #
- ValueList = [InfExpandMacro(Value, (FileName, PcdLineContent, PcdLineNo),
+ #
+ ValueList = [InfExpandMacro(Value, (FileName, PcdLineContent, PcdLineNo),
self.FileLocalMacros, SectionMacros, True)
for Value in ValueList]
-
+
if len(ValueList) >= 1:
PcdList.append((ValueList, CommentsList, (PcdLineContent, PcdLineNo, FileName)))
ValueList = []
@@ -129,11 +129,11 @@ class InfPcdSectionParser(InfParserSectionRoot):
for Line in SectionString:
LineContent = Line[0].strip()
LineNo = Line[1]
-
+
if LineContent == '':
CommentsList = []
continue
-
+
if LineContent.startswith(DT.TAB_COMMENT_SPLIT):
CommentsList.append(LineContent)
continue
@@ -144,7 +144,7 @@ class InfPcdSectionParser(InfParserSectionRoot):
if CommentIndex > -1:
CommentsList.append(LineContent[CommentIndex+1:])
LineContent = LineContent[:CommentIndex]
-
+
TokenList = GetSplitValueList(LineContent, DT.TAB_VALUE_SPLIT)
#
# PatchablePcd
@@ -152,33 +152,33 @@ class InfPcdSectionParser(InfParserSectionRoot):
#
if KeysList[0][0].upper() == DT.TAB_INF_PATCH_PCD.upper():
if len(TokenList) != 3:
- Logger.Error('InfParser',
- FORMAT_INVALID,
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
ST.ERR_ASBUILD_PATCHPCD_FORMAT_INVALID,
File=FileName,
Line=LineNo,
ExtraData=LineContent)
- #
+ #
elif KeysList[0][0].upper() == DT.TAB_INF_PCD_EX.upper():
if len(TokenList) != 1:
- Logger.Error('InfParser',
- FORMAT_INVALID,
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
ST.ERR_ASBUILD_PCDEX_FORMAT_INVALID,
File=FileName,
Line=LineNo,
ExtraData=LineContent)
ValueList[0:len(TokenList)] = TokenList
- if len(ValueList) >= 1:
- PcdList.append((ValueList, CommentsList, (LineContent, LineNo, FileName)))
+ if len(ValueList) >= 1:
+ PcdList.append((ValueList, CommentsList, (LineContent, LineNo, FileName)))
ValueList = []
CommentsList = []
- continue
-
- if not InfSectionObject.SetPcds(PcdList, KeysList = KeysList,
+ continue
+
+ if not InfSectionObject.SetPcds(PcdList, KeysList = KeysList,
PackageInfo = self.InfPackageSection.GetPackages()):
- Logger.Error('InfParser',
- FORMAT_INVALID,
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR%("[PCD]"),
File=FileName,
- Line=LineIndex)
-
\ No newline at end of file
+ Line=LineIndex)
+
diff --git a/BaseTools/Source/Python/UPT/Parser/InfSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfSectionParser.py
index 8ba4c3fc08..1f254058d1 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfSectionParser.py
@@ -1,11 +1,11 @@
## @file
-# This file contained the parser for sections in INF file
+# This file contained the parser for sections in INF file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -95,7 +95,7 @@ def GetSpecialStr2(ItemList, FileName, LineNo, SectionString):
if ItemList[3] != '':
Logger.Error('Parser', FORMAT_INVALID, ST.ERR_INF_PARSER_SOURCE_SECTION_SECTIONNAME_INVALID \
% (SectionString), File=FileName, Line=LineNo, ExtraData=SectionString)
-
+
if not ItemList[0].upper() == DT.TAB_USER_EXTENSIONS.upper():
Str2 = ItemList[2] + ' | ' + ItemList[3]
else:
@@ -108,7 +108,7 @@ def GetSpecialStr2(ItemList, FileName, LineNo, SectionString):
return Str2
## ProcessUseExtHeader
-#
+#
#
def ProcessUseExtHeader(ItemList):
NewItemList = []
@@ -138,12 +138,12 @@ def ProcessUseExtHeader(ItemList):
NewItemList.append(Item)
else:
AppendContent = AppendContent + "." + Item
-
+
if len(NewItemList) > 4:
return False, []
-
+
return True, NewItemList
-
+
## GetArch
#
# GetArch
@@ -244,7 +244,7 @@ class InfSectionParser(InfDefinSectionParser,
#
# A List for store define section content.
- #
+ #
self._PcdNameList = []
self._SectionName = ''
self._SectionType = 0
@@ -253,7 +253,7 @@ class InfSectionParser(InfDefinSectionParser,
#
# File Header content parser
- #
+ #
def InfHeaderParser(self, Content, InfHeaderObject2, FileName, IsBinaryHeader = False):
if IsBinaryHeader:
(Abstract, Description, Copyright, License) = ParseHeaderCommentSection(Content, FileName, True)
@@ -272,7 +272,7 @@ class InfSectionParser(InfDefinSectionParser,
#
# Insert Abstract, Description, CopyRight, License into header object
- #
+ #
InfHeaderObject2.SetAbstract(Abstract)
InfHeaderObject2.SetDescription(Description)
InfHeaderObject2.SetCopyright(Copyright)
@@ -287,7 +287,7 @@ class InfSectionParser(InfDefinSectionParser,
#
# [section_name.arch<.platform|module_type>]
#
- # @param String A string contained the content need to be parsed.
+ # @param String A string contained the content need to be parsed.
#
def SectionHeaderParser(self, SectionString, FileName, LineNo):
_Scope = []
@@ -313,7 +313,7 @@ class InfSectionParser(InfDefinSectionParser,
#
# different section should not mix in one section
# Allow different PCD type sections mixed together
- #
+ #
if _SectionName.upper() not in _PcdNameList:
if _SectionName != '' and _SectionName.upper() != ItemList[0].upper():
Logger.Error('Parser',
@@ -350,7 +350,7 @@ class InfSectionParser(InfDefinSectionParser,
#
# For [Defines] section, do special check.
- #
+ #
if ItemList[0].upper() == DT.TAB_COMMON_DEFINES.upper():
if len(ItemList) != 1:
Logger.Error('Parser',
@@ -360,26 +360,26 @@ class InfSectionParser(InfDefinSectionParser,
#
# For [UserExtension] section, do special check.
- #
+ #
if ItemList[0].upper() == DT.TAB_USER_EXTENSIONS.upper():
-
+
RetValue = ProcessUseExtHeader(ItemList)
-
+
if not RetValue[0]:
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID % (SectionString),
File=FileName, Line=LineNo, ExtraData=SectionString)
else:
- ItemList = RetValue[1]
-
+ ItemList = RetValue[1]
+
if len(ItemList) == 3:
ItemList.append('COMMON')
-
+
Str1 = ItemList[1]
#
- # For Library classes, need to check module type.
+ # For Library classes, need to check module type.
#
if ItemList[0].upper() == DT.TAB_LIBRARY_CLASSES.upper() and len(ItemList) == 3:
if ItemList[2] != '':
@@ -424,10 +424,10 @@ class InfSectionParser(InfDefinSectionParser,
## GenSpecialSectionList
#
- # @param SpecialSectionList: a list of list, of which item's format
+ # @param SpecialSectionList: a list of list, of which item's format
# (Comment, LineNum)
# @param ContainerFile: Input value for filename of Inf file
- #
+ #
def InfSpecialCommentParser (self, SpecialSectionList, InfSectionObject, ContainerFile, SectionType):
ReFindSpecialCommentRe = re.compile(r"""#(?:\s*)\[(.*?)\](?:.*)""", re.DOTALL)
ReFindHobArchRe = re.compile(r"""[Hh][Oo][Bb]\.([^,]*)""", re.DOTALL)
diff --git a/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py
index 645c2c3414..6b94394508 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py
@@ -1,11 +1,11 @@
## @file
-# This file contained the parser for [Sources] sections in INF file
+# This file contained the parser for [Sources] sections in INF file
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -31,7 +31,7 @@ from Parser.InfParserMisc import InfParserSectionRoot
class InfSourceSectionParser(InfParserSectionRoot):
## InfSourceParser
#
- #
+ #
def InfSourceParser(self, SectionString, InfSectionObject, FileName):
SectionMacros = {}
ValueList = []
@@ -43,12 +43,12 @@ class InfSourceSectionParser(InfParserSectionRoot):
for Line in SectionString:
SrcLineContent = Line[0]
SrcLineNo = Line[1]
-
+
if SrcLineContent.strip() == '':
continue
-
+
#
- # Found Header Comments
+ # Found Header Comments
#
if SrcLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
#
@@ -59,7 +59,7 @@ class InfSourceSectionParser(InfParserSectionRoot):
SectionContent += SrcLineContent + DT.END_OF_LINE
continue
#
- # First time encounter comment
+ # First time encounter comment
#
else:
#
@@ -68,18 +68,18 @@ class InfSourceSectionParser(InfParserSectionRoot):
HeaderComments = []
HeaderComments.append(Line)
StillCommentFalg = True
- SectionContent += SrcLineContent + DT.END_OF_LINE
+ SectionContent += SrcLineContent + DT.END_OF_LINE
continue
else:
StillCommentFalg = False
-
+
if len(HeaderComments) >= 1:
LineComment = InfLineCommentObject()
LineCommentContent = ''
for Item in HeaderComments:
LineCommentContent += Item[0] + DT.END_OF_LINE
LineComment.SetHeaderComments(LineCommentContent)
-
+
#
# Find Tail comment.
#
@@ -89,11 +89,11 @@ class InfSourceSectionParser(InfParserSectionRoot):
if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
-
+
#
# Find Macro
#
- Name, Value = MacroParser((SrcLineContent, SrcLineNo),
+ Name, Value = MacroParser((SrcLineContent, SrcLineNo),
FileName,
DT.MODEL_EFI_SOURCE_FILE,
self.FileLocalMacros)
@@ -102,44 +102,44 @@ class InfSourceSectionParser(InfParserSectionRoot):
LineComment = None
HeaderComments = []
continue
-
+
#
# Replace with Local section Macro and [Defines] section Macro.
- #
- SrcLineContent = InfExpandMacro(SrcLineContent,
- (FileName, SrcLineContent, SrcLineNo),
- self.FileLocalMacros,
+ #
+ SrcLineContent = InfExpandMacro(SrcLineContent,
+ (FileName, SrcLineContent, SrcLineNo),
+ self.FileLocalMacros,
SectionMacros)
TokenList = GetSplitValueList(SrcLineContent, DT.TAB_VALUE_SPLIT, 4)
ValueList[0:len(TokenList)] = TokenList
-
+
#
# Store section content string after MACRO replaced.
#
- SectionContent += SrcLineContent + DT.END_OF_LINE
-
- SourceList.append((ValueList, LineComment,
+ SectionContent += SrcLineContent + DT.END_OF_LINE
+
+ SourceList.append((ValueList, LineComment,
(SrcLineContent, SrcLineNo, FileName)))
ValueList = []
LineComment = None
TailComments = ''
HeaderComments = []
continue
-
+
#
# Current section archs
#
ArchList = []
for Item in self.LastSectionHeaderContent:
if Item[1] not in ArchList:
- ArchList.append(Item[1])
+ ArchList.append(Item[1])
InfSectionObject.SetSupArchList(Item[1])
-
- InfSectionObject.SetAllContent(SectionContent)
+
+ InfSectionObject.SetAllContent(SectionContent)
if not InfSectionObject.SetSources(SourceList, Arch = ArchList):
- Logger.Error('InfParser',
+ Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Sources]"),
- File=FileName,
- Line=Item[3])
\ No newline at end of file
+ File=FileName,
+ Line=Item[3])
diff --git a/BaseTools/Source/Python/UPT/Parser/__init__.py b/BaseTools/Source/Python/UPT/Parser/__init__.py
index 151f9ed433..e98b8a6913 100644
--- a/BaseTools/Source/Python/UPT/Parser/__init__.py
+++ b/BaseTools/Source/Python/UPT/Parser/__init__.py
@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
Parser
-'''
\ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py b/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py
index 941dd4a398..5dc00affad 100644
--- a/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py
+++ b/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py
@@ -1,11 +1,11 @@
## @file DecPomAlignment.py
# This file contained the adapter for convert INF parser object to POM Object
#
-# Copyright (c) 2011 - 2016, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -106,17 +106,17 @@ class DecPomAlignment(PackageObject):
self.DecParser = None
self.UniFileClassObject = None
self.PcdDefaultValueDict = {}
-
+
#
# Load Dec file
#
self.LoadDecFile(Filename)
-
+
#
# Transfer to Package Object if IsToPackage is True
#
self.DecToPackage()
-
+
## Load Dec file
#
# Load the file if it exists
@@ -132,13 +132,13 @@ class DecPomAlignment(PackageObject):
self.SetFullPath(Filename)
self.SetRelaPath(Path)
self.SetFileName(Name)
- self.SetPackagePath(GetRelativePath(Path, self.WorkspaceDir))
+ self.SetPackagePath(GetRelativePath(Path, self.WorkspaceDir))
self.SetCombinePath(GetRelativePath(Filename, self.WorkspaceDir))
-
+
self.DecParser = Dec(Filename)
-
+
## Transfer to Package Object
- #
+ #
# Transfer all contents of a Dec file to a standard Package Object
#
def DecToPackage(self):
@@ -146,12 +146,12 @@ class DecPomAlignment(PackageObject):
# Init global information for the file
#
ContainerFile = self.GetFullPath()
-
+
#
# Generate Package Header
#
self.GenPackageHeader(ContainerFile)
-
+
#
# Generate Includes
#
@@ -171,28 +171,28 @@ class DecPomAlignment(PackageObject):
# Generate Ppis
#
self.GenGuidProtocolPpis(TAB_PPIS, ContainerFile)
-
+
#
# Generate LibraryClasses
#
self.GenLibraryClasses(ContainerFile)
-
+
#
# Generate Pcds
#
self.GenPcds(ContainerFile)
-
+
#
- # Generate Module File list, will be used later on to generate
+ # Generate Module File list, will be used later on to generate
# distribution
#
self.GenModuleFileList(ContainerFile)
-
+
#
# Generate user extensions
#
self.GenUserExtensions()
-
+
## Generate user extension
#
#
@@ -231,7 +231,7 @@ class DecPomAlignment(PackageObject):
PrivateUserExtension.SetIdentifier(DT.TAB_PRIVATE)
PrivateUserExtension.SetUserID(DT.TAB_INTEL)
self.SetUserExtensionList(self.GetUserExtensionList() + [PrivateUserExtension])
-
+
## Generate miscellaneous files on DEC file
#
#
@@ -249,23 +249,23 @@ class DecPomAlignment(PackageObject):
FileObj.SetURI(FileName)
MiscFileObj.SetFileList(MiscFileObj.GetFileList()+[FileObj])
else:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(Line),
File=self.GetFileName(),
- ExtraData=Line)
- self.SetMiscFileList(self.GetMiscFileList()+[MiscFileObj])
-
+ ExtraData=Line)
+ self.SetMiscFileList(self.GetMiscFileList()+[MiscFileObj])
+
## Generate Package Header
#
# Gen Package Header of Dec as <Key> = <Value>
#
- # @param ContainerFile: The Dec file full path
+ # @param ContainerFile: The Dec file full path
#
def GenPackageHeader(self, ContainerFile):
Logger.Debug(2, "Generate PackageHeader ...")
DefinesDict = {}
-
+
#
# Update all defines item in database
#
@@ -293,12 +293,12 @@ class DecPomAlignment(PackageObject):
UniFileClassObject([PathClass(os.path.join(DefObj.GetPackagePath(), DefObj.GetPackageUniFile()))])
else:
self.UniFileClassObject = None
-
+
if DefinesDict:
UserExtension = UserExtensionObject()
UserExtension.SetDefinesDict(DefinesDict)
UserExtension.SetIdentifier('DefineModifiers')
- UserExtension.SetUserID('EDK2')
+ UserExtension.SetUserID('EDK2')
self.SetUserExtensionList(
self.GetUserExtensionList() + [UserExtension]
)
@@ -321,7 +321,7 @@ class DecPomAlignment(PackageObject):
self.SetCopyright(('', Copyright))
if License:
self.SetLicense(('', License))
-
+
#
# Get Binary header information
#
@@ -329,7 +329,7 @@ class DecPomAlignment(PackageObject):
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(self.DecParser.BinaryHeadComment,
ContainerFile, True)
-
+
if not Abstract or not Description or not Copyright or not License:
Logger.Error('MkPkg',
FORMAT_INVALID,
@@ -343,7 +343,7 @@ class DecPomAlignment(PackageObject):
BinaryAbstractList = []
BinaryDescriptionList = []
-
+
#Get Binary header from UNI file
# Initialize the UniStrDict dictionary, top keys are language codes
UniStrDict = {}
@@ -359,7 +359,7 @@ class DecPomAlignment(PackageObject):
if StringDefClassObject.StringName == TAB_DEC_BINARY_DESCRIPTION:
if (Lang, ConvertSpecialUnicodes(StringDefClassObject.StringValue)) \
not in self.GetBinaryHeaderDescription():
- BinaryDescriptionList.append((Lang,
+ BinaryDescriptionList.append((Lang,
ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
#Combine Binary header from DEC file and UNI file
BinaryAbstractList = self.GetBinaryHeaderAbstract() + BinaryAbstractList
@@ -376,13 +376,13 @@ class DecPomAlignment(PackageObject):
BinaryUserExtension.SetIdentifier(TAB_BINARY_HEADER_IDENTIFIER)
BinaryUserExtension.SetUserID(TAB_BINARY_HEADER_USERID)
self.SetUserExtensionList(self.GetUserExtensionList() + [BinaryUserExtension])
-
-
+
+
## GenIncludes
#
# Gen Includes of Dec
- #
- # @param ContainerFile: The Dec file full path
+ #
+ # @param ContainerFile: The Dec file full path
#
def GenIncludes(self, ContainerFile):
if ContainerFile:
@@ -402,17 +402,17 @@ class DecPomAlignment(PackageObject):
IncludesDict[IncludePath] = IncludesDict[IncludePath] + Item.GetArchList()
else:
IncludesDict[IncludePath] = Item.GetArchList()
-
- #
+
+ #
# get the standardIncludeFileList(industry), packageIncludeFileList
- # (others) for PackageObject
- #
+ # (others) for PackageObject
+ #
PackagePath = os.path.split(self.GetFullPath())[0]
IncludePathList = \
sorted([os.path.normpath(Path) + sep for Path in IncludesDict.keys()])
-
+
#
- # get a non-overlap set of include path, IncludePathList should be
+ # get a non-overlap set of include path, IncludePathList should be
# sorted, and path should be end with path seperator '\'
#
NonOverLapList = []
@@ -429,7 +429,7 @@ class DecPomAlignment(PackageObject):
#
for IndexN in range (0, len(IncludePathList)):
IncludePathList[IndexN] = os.path.normpath(IncludePathList[IndexN])
- IncludePathList.sort()
+ IncludePathList.sort()
IncludePathList.reverse()
#
# save the include path list for later usage
@@ -437,7 +437,7 @@ class DecPomAlignment(PackageObject):
self.SetIncludePathList(IncludePathList)
StandardIncludeFileList = []
PackageIncludeFileList = []
-
+
IncludeFileList = []
for Path in NonOverLapList:
FileList = GetFiles(os.path.join(PackagePath, Path), ['CVS', '.svn'], False)
@@ -445,9 +445,9 @@ class DecPomAlignment(PackageObject):
for Includefile in IncludeFileList:
ExtName = os.path.splitext(Includefile)[1]
if ExtName.upper() == '.DEC' and self.CheckMulDec:
- Logger.Error('MkPkg',
+ Logger.Error('MkPkg',
UPT_MUL_DEC_ERROR,
- ST.ERR_MUL_DEC_ERROR%(os.path.dirname(ContainerFile),
+ ST.ERR_MUL_DEC_ERROR%(os.path.dirname(ContainerFile),
os.path.basename(ContainerFile),
Includefile))
@@ -462,15 +462,15 @@ class DecPomAlignment(PackageObject):
if Includefile.find('IndustryStandard') != -1:
StandardIncludeFileList.append(Include)
else:
- PackageIncludeFileList.append(Include)
-
+ PackageIncludeFileList.append(Include)
+
self.SetStandardIncludeFileList(StandardIncludeFileList)
#
# put include path into the PackageIncludeFileList
#
PackagePathList = []
- IncObj = self.DecParser.GetIncludeSectionObject()
+ IncObj = self.DecParser.GetIncludeSectionObject()
for Item in IncObj.GetAllIncludes():
IncludePath = Item.File
Include = IncludeObject()
@@ -478,13 +478,13 @@ class DecPomAlignment(PackageObject):
Include.SetSupArchList(Item.GetArchList())
PackagePathList.append(Include)
self.SetPackageIncludeFileList(PackagePathList + PackageIncludeFileList)
-
+
## GenPpis
#
# Gen Ppis of Dec
# <CName>=<GuidValue>
#
- # @param ContainerFile: The Dec file full path
+ # @param ContainerFile: The Dec file full path
#
def GenGuidProtocolPpis(self, Type, ContainerFile):
if ContainerFile:
@@ -504,7 +504,7 @@ class DecPomAlignment(PackageObject):
Factory = CreateGuidObject
elif Type == TAB_PROTOCOLS:
Obj = self.DecParser.GetProtocolSectionObject()
-
+
def CreateProtocolObject():
return ProtocolObject()
Factory = CreateProtocolObject
@@ -519,9 +519,9 @@ class DecPomAlignment(PackageObject):
# Should not be here
#
return
-
+
DeclarationsList = []
-
+
#
# Go through each arch
#
@@ -530,7 +530,7 @@ class DecPomAlignment(PackageObject):
Value = Item.GuidString
HelpTxt = ParseGenericComment(Item.GetHeadComment() + \
Item.GetTailComment())
-
+
ListObject = Factory()
ListObject.SetCName(Name)
ListObject.SetGuid(Value)
@@ -539,10 +539,10 @@ class DecPomAlignment(PackageObject):
if self.UniFileClassObject:
HelpTxt.SetLang(TAB_LANGUAGE_EN_X)
ListObject.SetHelpTextList([HelpTxt])
-
+
DeclarationsList.append(ListObject)
- #
+ #
#GuidTypeList is abstracted from help
#
if Type == TAB_GUIDS:
@@ -551,20 +551,20 @@ class DecPomAlignment(PackageObject):
self.SetProtocolList(self.GetProtocolList() + DeclarationsList)
elif Type == TAB_PPIS:
self.SetPpiList(self.GetPpiList() + DeclarationsList)
-
+
## GenLibraryClasses
#
# Gen LibraryClasses of Dec
# <CName>=<GuidValue>
#
- # @param ContainerFile: The Dec file full path
+ # @param ContainerFile: The Dec file full path
#
def GenLibraryClasses(self, ContainerFile):
if ContainerFile:
pass
Logger.Debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
LibraryClassDeclarations = []
-
+
LibObj = self.DecParser.GetLibraryClassSectionObject()
for Item in LibObj.GetAllLibraryclasses():
LibraryClass = LibraryClassObject()
@@ -578,44 +578,44 @@ class DecPomAlignment(PackageObject):
HelpTxt.SetLang(TAB_LANGUAGE_EN_X)
LibraryClass.SetHelpTextList([HelpTxt])
LibraryClassDeclarations.append(LibraryClass)
-
+
self.SetLibraryClassList(self.GetLibraryClassList() + \
LibraryClassDeclarations)
-
+
## GenPcds
#
# Gen Pcds of Dec
# <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
#
- # @param ContainerFile: The Dec file full path
+ # @param ContainerFile: The Dec file full path
#
def GenPcds(self, ContainerFile):
Logger.Debug(2, "Generate %s ..." % TAB_PCDS)
PcdObj = self.DecParser.GetPcdSectionObject()
#
# Get all Pcds
- #
+ #
PcdDeclarations = []
IterList = [
(TAB_PCDS_FIXED_AT_BUILD_NULL, 'FixedPcd'),
- (TAB_PCDS_PATCHABLE_IN_MODULE_NULL, 'PatchPcd'),
+ (TAB_PCDS_PATCHABLE_IN_MODULE_NULL, 'PatchPcd'),
(TAB_PCDS_FEATURE_FLAG_NULL, 'FeaturePcd'),
- (TAB_PCDS_DYNAMIC_EX_NULL, 'PcdEx'),
+ (TAB_PCDS_DYNAMIC_EX_NULL, 'PcdEx'),
(TAB_PCDS_DYNAMIC_NULL, 'Pcd')]
-
+
PromptStrList = []
HelpStrList = []
PcdErrStrList = []
# Initialize UniStrDict dictionary, top keys are language codes
UniStrDict = {}
StrList = []
-
+
Language = ''
if self.UniFileClassObject:
Language = TAB_LANGUAGE_EN_X
else:
Language = TAB_LANGUAGE_EN_US
-
+
if self.UniFileClassObject:
UniStrDict = self.UniFileClassObject.OrderedStringList
for Lang in UniStrDict:
@@ -652,7 +652,7 @@ class DecPomAlignment(PackageObject):
self.DecParser.GetDefineSectionMacro()
)
PcdDeclaration.SetSupArchList(Item.GetArchListOfType(PcdType))
-
+
#
# Get PCD error message from PCD error comment section in DEC file
#
@@ -662,7 +662,7 @@ class DecPomAlignment(PackageObject):
Key = (PcdDeclaration.GetTokenSpaceGuidCName(), PcdErr.GetErrorNumber())
PcdErr.SetErrorMessageList(PcdErr.GetErrorMessageList() + \
[(Language, self.DecParser.PcdErrorCommentDict[Key])])
-
+
for Index in range(0, len(PromptStrList)):
StrNameList = PromptStrList[Index][1].split('_')
if StrNameList[1].lower() == Item.TokenSpaceGuidCName.lower() and \
@@ -676,7 +676,7 @@ class DecPomAlignment(PackageObject):
break
else:
PcdDeclaration.SetPromptList(PcdDeclaration.GetPromptList() + [TxtObj])
-
+
for Index in range(0, len(HelpStrList)):
StrNameList = HelpStrList[Index][1].split('_')
if StrNameList[1].lower() == Item.TokenSpaceGuidCName.lower() and \
@@ -703,7 +703,7 @@ class DecPomAlignment(PackageObject):
(PcdErrStrList[Index][0], PcdErrStrList[Index][2]) not in PcdErr.GetErrorMessageList():
PcdErr.SetErrorMessageList(PcdErr.GetErrorMessageList() + \
[(PcdErrStrList[Index][0], PcdErrStrList[Index][2])])
-
+
#
# Check to prevent missing error message if a Pcd has the error code.
#
@@ -715,12 +715,12 @@ class DecPomAlignment(PackageObject):
ST.ERR_DECPARSE_PCD_UNMATCHED_ERRORCODE % PcdErr.GetErrorNumber(),
ContainerFile,
PcdErr.GetLineNum(),
- PcdErr.GetFileLine())
-
+ PcdErr.GetFileLine())
+
PcdDeclarations.append(PcdDeclaration)
self.SetPcdList(self.GetPcdList() + PcdDeclarations)
self.CheckPcdValue()
-
+
##
# Get error message via language
# @param ErrorMessageList: Error message tuple list the language and its message
@@ -741,17 +741,17 @@ class DecPomAlignment(PackageObject):
return ErrorMessageList[0][1]
except IndexError:
return ''
- return ''
-
+ return ''
+
##
# Replace the strings for Python eval function.
- # @param ReplaceValue: The string that needs to be replaced.
- # @return: The string was replaced, then eval function is always making out it.
+ # @param ReplaceValue: The string that needs to be replaced.
+ # @return: The string was replaced, then eval function is always making out it.
def ReplaceForEval(self, ReplaceValue, IsRange=False, IsExpr=False):
if self.FullPath:
pass
#
- # deal with "NOT EQ", "NOT LT", "NOT GT", "NOT LE", "NOT GE", "NOT NOT"
+ # deal with "NOT EQ", "NOT LT", "NOT GT", "NOT LE", "NOT GE", "NOT NOT"
#
NOTNOT_Pattern = '[\t\s]*NOT[\t\s]+NOT[\t\s]*'
NOTGE_Pattern = '[\t\s]*NOT[\t\s]+GE[\t\s]*'
@@ -765,7 +765,7 @@ class DecPomAlignment(PackageObject):
ReplaceValue = re.compile(NOTLE_Pattern).sub('x > ', ReplaceValue)
ReplaceValue = re.compile(NOTGE_Pattern).sub('x < ', ReplaceValue)
ReplaceValue = re.compile(NOTEQ_Pattern).sub('x != ', ReplaceValue)
-
+
if IsRange:
ReplaceValue = ReplaceValue.replace('EQ', 'x ==')
ReplaceValue = ReplaceValue.replace('LT', 'x <')
@@ -779,9 +779,9 @@ class DecPomAlignment(PackageObject):
ReplaceValue = ReplaceValue.replace('LT', '<')
ReplaceValue = ReplaceValue.replace('LE', '<=')
ReplaceValue = ReplaceValue.replace('GT', '>')
- ReplaceValue = ReplaceValue.replace('GE', '>=')
- ReplaceValue = ReplaceValue.replace('XOR', '^')
-
+ ReplaceValue = ReplaceValue.replace('GE', '>=')
+ ReplaceValue = ReplaceValue.replace('XOR', '^')
+
ReplaceValue = ReplaceValue.replace('AND', 'and')
ReplaceValue = ReplaceValue.replace('&&', ' and ')
ReplaceValue = ReplaceValue.replace('xor', '^')
@@ -789,7 +789,7 @@ class DecPomAlignment(PackageObject):
ReplaceValue = ReplaceValue.replace('||', ' or ')
ReplaceValue = ReplaceValue.replace('NOT', 'not')
if ReplaceValue.find('!') >= 0 and ReplaceValue[ReplaceValue.index('!') + 1] != '=':
- ReplaceValue = ReplaceValue.replace('!', ' not ')
+ ReplaceValue = ReplaceValue.replace('!', ' not ')
if '.' in ReplaceValue:
Pattern = '[a-zA-Z0-9]{1,}\.[a-zA-Z0-9]{1,}'
MatchedList = re.findall(Pattern, ReplaceValue)
@@ -797,7 +797,7 @@ class DecPomAlignment(PackageObject):
if MatchedItem not in self.PcdDefaultValueDict:
Logger.Error("Dec File Parser", FORMAT_INVALID, Message=ST.ERR_DECPARSE_PCD_NODEFINED % MatchedItem,
File=self.FullPath)
-
+
ReplaceValue = ReplaceValue.replace(MatchedItem, self.PcdDefaultValueDict[MatchedItem])
return ReplaceValue
@@ -809,7 +809,7 @@ class DecPomAlignment(PackageObject):
for Pcd in self.GetPcdList():
self.PcdDefaultValueDict[TAB_SPLIT.join((Pcd.GetTokenSpaceGuidCName(), Pcd.GetCName())).strip()] = \
Pcd.GetDefaultValue()
-
+
for Pcd in self.GetPcdList():
ValidationExpressions = []
PcdGuidName = TAB_SPLIT.join((Pcd.GetTokenSpaceGuidCName(), Pcd.GetCName()))
@@ -825,20 +825,20 @@ class DecPomAlignment(PackageObject):
if QuotedMatchedObj:
MatchedStr = QuotedMatchedObj.group().strip()
if MatchedStr.startswith('L'):
- Expression = Expression.replace(MatchedStr, MatchedStr[1:].strip())
+ Expression = Expression.replace(MatchedStr, MatchedStr[1:].strip())
Expression = self.ReplaceForEval(Expression, IsExpr=True)
Expression = Expression.replace(PcdGuidName, 'x')
Message = self.GetEnErrorMessage(Valid.GetErrorMessageList())
- ValidationExpressions.append((Expression, Message))
-
+ ValidationExpressions.append((Expression, Message))
+
ValidList = Valid.GetValidValue()
if ValidList:
ValidValue = 'x in %s' % [eval(v) for v in ValidList.split(' ') if v]
Message = self.GetEnErrorMessage(Valid.GetErrorMessageList())
ValidationExpressions.append((ValidValue, Message))
-
- ValidValueRange = Valid.GetValidValueRange()
+
+ ValidValueRange = Valid.GetValidValueRange()
if ValidValueRange:
ValidValueRange = self.ReplaceForEval(ValidValueRange, IsRange=True)
if ValidValueRange.find('-') >= 0:
@@ -848,7 +848,7 @@ class DecPomAlignment(PackageObject):
ValidValueRange = 'x %s' % ValidValueRange
Message = self.GetEnErrorMessage(Valid.GetErrorMessageList())
ValidationExpressions.append((ValidValueRange, Message))
-
+
DefaultValue = self.PcdDefaultValueDict[PcdGuidName.strip()]
#
# Delete the 'L' prefix of a quoted string, this operation is for eval()
@@ -859,7 +859,7 @@ class DecPomAlignment(PackageObject):
MatchedStr = QuotedMatchedObj.group().strip()
if MatchedStr.startswith('L'):
DefaultValue = DefaultValue.replace(MatchedStr, MatchedStr[1:].strip())
-
+
try:
DefaultValue = eval(DefaultValue.replace('TRUE', 'True').replace('true', 'True')
.replace('FALSE', 'False').replace('false', 'False'))
@@ -877,11 +877,11 @@ class DecPomAlignment(PackageObject):
## GenModuleFileList
#
- def GenModuleFileList(self, ContainerFile):
+ def GenModuleFileList(self, ContainerFile):
ModuleFileList = []
ContainerFileName = os.path.basename(ContainerFile)
ContainerFilePath = os.path.dirname(ContainerFile)
- for Item in GetFiles(ContainerFilePath,
+ for Item in GetFiles(ContainerFilePath,
['CVS', '.svn'] + self.GetIncludePathList(), False):
ExtName = os.path.splitext(Item)[1]
if ExtName.lower() == '.inf':
@@ -889,14 +889,14 @@ class DecPomAlignment(PackageObject):
elif ExtName.upper() == '.DEC' and self.CheckMulDec:
if Item == ContainerFileName:
continue
- Logger.Error('MkPkg',
+ Logger.Error('MkPkg',
UPT_MUL_DEC_ERROR,
- ST.ERR_MUL_DEC_ERROR%(ContainerFilePath,
- ContainerFileName,
+ ST.ERR_MUL_DEC_ERROR%(ContainerFilePath,
+ ContainerFileName,
Item))
-
+
self.SetModuleFileList(ModuleFileList)
-
+
## Show detailed information of Package
#
# Print all members and their values of Package class
@@ -906,7 +906,7 @@ class DecPomAlignment(PackageObject):
print('\nBaseName =', self.GetBaseName())
print('\nVersion =', self.GetVersion())
print('\nGuid =', self.GetGuid())
-
+
print('\nStandardIncludes = %d ' \
% len(self.GetStandardIncludeFileList()), end=' ')
for Item in self.GetStandardIncludeFileList():
@@ -915,7 +915,7 @@ class DecPomAlignment(PackageObject):
% len(self.GetPackageIncludeFileList()), end=' ')
for Item in self.GetPackageIncludeFileList():
print(Item.GetFilePath(), ' ', Item.GetSupArchList())
-
+
print('\nGuids =', self.GetGuidList())
for Item in self.GetGuidList():
print(Item.GetCName(), Item.GetGuid(), Item.GetSupArchList())
@@ -937,7 +937,7 @@ class DecPomAlignment(PackageObject):
'ValidUsage=', Item.GetValidUsage(), \
'SupArchList', Item.GetSupArchList(), \
'Token=', Item.GetToken(), 'DatumType=', Item.GetDatumType())
-
+
for Item in self.GetMiscFileList():
print(Item.GetName())
for FileObjectItem in Item.GetFileList():
@@ -947,11 +947,11 @@ class DecPomAlignment(PackageObject):
## GenPcdDeclaration
#
# @param ContainerFile: File name of the DEC file
-# @param PcdInfo: Pcd information, of format (TokenGuidCName,
-# TokenName, Value, DatumType, Token, Type,
+# @param PcdInfo: Pcd information, of format (TokenGuidCName,
+# TokenName, Value, DatumType, Token, Type,
# GenericComment, TailComment, Arch)
-# @param Language: The language of HelpText, Prompt
-#
+# @param Language: The language of HelpText, Prompt
+#
def GenPcdDeclaration(ContainerFile, PcdInfo, Language, MacroReplaceDict):
HelpStr = ''
PromptStr = ''
@@ -973,28 +973,28 @@ def GenPcdDeclaration(ContainerFile, PcdInfo, Language, MacroReplaceDict):
SupArchList = [Arch]
Pcd.SetSupArchList(SupArchList)
-
+
if GenericComment:
- HelpStr, PcdErrList, PromptStr = ParseDecPcdGenericComment(GenericComment,
- ContainerFile,
- TokenGuidCName,
+ HelpStr, PcdErrList, PromptStr = ParseDecPcdGenericComment(GenericComment,
+ ContainerFile,
+ TokenGuidCName,
TokenName,
MacroReplaceDict)
if PcdErrList:
Pcd.SetPcdErrorsList(PcdErrList)
if TailComment:
- SupModuleList, TailHelpStr = ParseDecPcdTailComment(TailComment,
+ SupModuleList, TailHelpStr = ParseDecPcdTailComment(TailComment,
ContainerFile)
if SupModuleList:
Pcd.SetSupModuleList(SupModuleList)
-
+
if HelpStr and (not HelpStr.endswith('\n')) and TailHelpStr:
HelpStr += '\n'
HelpStr += TailHelpStr
if HelpStr:
HelpTxtObj = TextObject()
- HelpTxtObj.SetLang(Language)
+ HelpTxtObj.SetLang(Language)
HelpTxtObj.SetString(HelpStr)
Pcd.SetHelpTextList([HelpTxtObj])
if PromptStr:
diff --git a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
index 84f0d43f01..c055089f2c 100644
--- a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
+++ b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -59,8 +59,8 @@ from Common.MultipleWorkspace import MultipleWorkspace as mws
#
class InfPomAlignment(ModuleObject):
## Construct of InfPomAlignment
- # Skip means that UPT don't care the syntax of INF, this may be the not
- # distributed INF files during creation or the INF files checked for
+ # Skip means that UPT don't care the syntax of INF, this may be the not
+ # distributed INF files during creation or the INF files checked for
# dependency rule during remove.
#
def __init__(self, FileName, WorkSpace=None, PackagePath='', Skip=False):
@@ -90,12 +90,12 @@ class InfPomAlignment(ModuleObject):
self._GenInfPomObjects(Skip)
##
- # Generate all POM objects, the original input comes
+ # Generate all POM objects, the original input comes
# from INF parser's output
#
def _GenInfPomObjects(self, Skip):
#
- # Call INF Parser to get information from INF file
+ # Call INF Parser to get information from INF file
#
self.Parser = InfParser.InfParser(self.FileName, self.WorkSpace)
self.FullPath = self.Parser.FullPath
@@ -144,8 +144,8 @@ class InfPomAlignment(ModuleObject):
self.SetFileName(self.FileName)
self.SetFullPath(self.FullPath)
#
- # The INF's filename (without the directory path or the extension)
- # must be used for the value of the
+ # The INF's filename (without the directory path or the extension)
+ # must be used for the value of the
# ModuleSurfaceArea.Header.Name element
#
self.SetName(os.path.splitext(os.path.basename(self.FileName))[0])
@@ -184,7 +184,7 @@ class InfPomAlignment(ModuleObject):
#
# must exist items in INF define section
- # MODULE_TYPE/BASE_NAME/INF_VERSION/FILE_GUID/VERSION_STRING
+ # MODULE_TYPE/BASE_NAME/INF_VERSION/FILE_GUID/VERSION_STRING
#
if DefineObj.GetModuleType() is None:
Logger.Error("InfParser", FORMAT_INVALID,
@@ -194,7 +194,7 @@ class InfPomAlignment(ModuleObject):
ModuleType = DefineObj.GetModuleType().GetValue()
if ModuleType:
#
- # Drivers and applications are not allowed to have a MODULE_TYPE of "BASE". Only
+ # Drivers and applications are not allowed to have a MODULE_TYPE of "BASE". Only
# libraries are permitted to a have a MODULE_TYPE of "BASE".
#
if len(DefineObj.LibraryClass) == 0 and ModuleType == 'BASE':
@@ -248,7 +248,7 @@ class InfPomAlignment(ModuleObject):
Logger.Error("Parser", PARSER_ERROR, ST.ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF, ExtraData=self.FullPath,
RaiseError=Logger.IS_RAISE_ERROR)
#
- # if there is Shadow, Should judge the MODULE_TYPE in
+ # if there is Shadow, Should judge the MODULE_TYPE in
# SEC, PEI_CORE and PEIM
#
if DefineObj.GetShadow():
@@ -283,7 +283,7 @@ class InfPomAlignment(ModuleObject):
self._GenSpecialComments()
#
# put all define statement into user-extension sections
- #
+ #
DefinesDictNew = GenModuleHeaderUserExt(DefineObj, ArchString)
if DefinesDictNew:
UserExtension = CommonObject.UserExtensionObject()
@@ -293,7 +293,7 @@ class InfPomAlignment(ModuleObject):
self.SetUserExtensionList(self.GetUserExtensionList() + [UserExtension])
#
# Get all meta-file header information
- # the record is list of items formated:
+ # the record is list of items formated:
# [LineValue, Arch, StartLine, ID, Third]
#
InfHeaderObj = self.Parser.InfHeader
@@ -356,13 +356,13 @@ class InfPomAlignment(ModuleObject):
self.SetExternList(self.GetExternList() + [Image])
#
# UNLOAD_IMAGE
- #
+ #
UnloadImageList = DefineObj.GetUnloadImages()
for UnloadImage in UnloadImageList:
Image = ExternObject()
#
# Future enhancement
- #
+ #
Image.SetUnloadImage(UnloadImage.GetCName())
self.SetExternList(self.GetExternList() + [Image])
#
@@ -373,7 +373,7 @@ class InfPomAlignment(ModuleObject):
Image = ExternObject()
#
# Future enhancement
- #
+ #
Image.SetConstructor(ConstructorItem.GetCName())
self.SetExternList(self.GetExternList() + [Image])
#
@@ -384,7 +384,7 @@ class InfPomAlignment(ModuleObject):
Image = ExternObject()
#
# Future enhancement
- #
+ #
Image.SetDestructor(DestructorItem.GetCName())
self.SetExternList(self.GetExternList() + [Image])
@@ -639,7 +639,7 @@ class InfPomAlignment(ModuleObject):
UserExtension = CommonObject.UserExtensionObject()
UserId = UserExtensionDataObj.GetUserId()
if UserId.startswith('"') and UserId.endswith('"'):
- UserId = UserId[1:-1]
+ UserId = UserId[1:-1]
UserExtension.SetUserID(UserId)
Identifier = UserExtensionDataObj.GetIdString()
if Identifier.startswith('"') and Identifier.endswith('"'):
@@ -653,11 +653,11 @@ class InfPomAlignment(ModuleObject):
UserExtension.SetStatement(UserExtensionDataObj.GetContent())
UserExtension.SetSupArchList(ConvertArchList(UserExtensionDataObj.GetSupArchList()))
self.SetUserExtensionList(self.GetUserExtensionList() + [UserExtension])
-
+
#
# Gen UserExtensions of TianoCore."BinaryHeader"
#
-
+
#Get Binary header from INF file
BinaryAbstractList = self.BinaryHeaderAbstractList
BinaryDescriptionList = self.BinaryHeaderDescriptionList
@@ -684,7 +684,7 @@ class InfPomAlignment(ModuleObject):
BinaryUserExtension.SetIdentifier(DT.TAB_BINARY_HEADER_IDENTIFIER)
BinaryUserExtension.SetUserID(DT.TAB_BINARY_HEADER_USERID)
self.SetUserExtensionList(self.GetUserExtensionList() + [BinaryUserExtension])
-
+
def _GenDepexesList(self, SmmDepexList, DxeDepexList, PeiDepexList):
if SmmDepexList:
self.SetSmmDepex(SmmDepexList)
@@ -802,7 +802,7 @@ class InfPomAlignment(ModuleObject):
BinaryData = BinaryObj.keys()
#
- # If the INF file does not contain a [Sources] section, and the INF file does contain a [Binaries] section,
+ # If the INF file does not contain a [Sources] section, and the INF file does contain a [Binaries] section,
# then the ModuleSurfaceArea.BinaryModule attribute must be set to true. Otherwise, do not use the attribute
#
if BinaryObj and not self.Parser.InfSourcesSection.GetSources():
@@ -827,7 +827,7 @@ class InfPomAlignment(ModuleObject):
#
# BuildOption AsBuild Info
- #
+ #
for BuildOptionItem in self.Parser.InfBuildOptionSection.GetBuildOptions():
AsBuildBuildOptionList.append(BuildOptionItem)
AsBuildIns.SetBuildFlagsList(AsBuildBuildOptionList)
@@ -849,14 +849,14 @@ class InfPomAlignment(ModuleObject):
TempPath = ModulePath
ModulePath = os.path.dirname(ModulePath)
PackageName = TempPath
- DecFilePath = os.path.normpath(os.path.join(WorkSpace, PackageName))
+ DecFilePath = os.path.normpath(os.path.join(WorkSpace, PackageName))
if DecFilePath:
for File in os.listdir(DecFilePath):
if File.upper().endswith('.DEC'):
DecFileFullPath = os.path.normpath(os.path.join(DecFilePath, File))
DecObjList.append(DecPomAlignment(DecFileFullPath, self.WorkSpace))
-
-
+
+
BinariesDict, AsBuildIns, BinaryFileObjectList = GenBinaryData(BinaryData, BinaryObj,
BinariesDict,
AsBuildIns,
@@ -864,7 +864,7 @@ class InfPomAlignment(ModuleObject):
self.GetSupArchList(),
self.BinaryModule,
DecObjList)
-
+
BinariesDict2 = {}
for Key in BinariesDict:
ValueList = BinariesDict[Key]
@@ -872,7 +872,7 @@ class InfPomAlignment(ModuleObject):
BinariesDict2[Key] = ValueList
else:
#
- # if there is no TagName, ToolCode, HelpStr,
+ # if there is no TagName, ToolCode, HelpStr,
# then do not need to put them into userextension
#
(Target, Family, TagName, HelpStr) = ValueList[0]
@@ -1000,7 +1000,7 @@ class InfPomAlignment(ModuleObject):
HelpTxtTailObj.SetLang(DT.TAB_LANGUAGE_EN_X)
HelpTxtTailObj.SetString(HelpString)
ListObject.SetHelpTextList([HelpTxtTailObj])
-
+
GuidProtocolPpiList.append(ListObject)
elif Type == DT.TAB_PROTOCOLS:
ProtocolData = ProtocolObj.keys()
@@ -1068,10 +1068,10 @@ class InfPomAlignment(ModuleObject):
FileObj.SetURI(FileName)
MiscFileObj.SetFileList(MiscFileObj.GetFileList()+[FileObj])
else:
- Logger.Error("InfParser",
+ Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(Line),
File=GlobalData.gINF_MODULE_NAME,
- ExtraData=Line)
- self.SetMiscFileList(self.GetMiscFileList()+[MiscFileObj])
-
+ ExtraData=Line)
+ self.SetMiscFileList(self.GetMiscFileList()+[MiscFileObj])
+
diff --git a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
index 3bb506bea6..239078d019 100644
--- a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
+++ b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -184,16 +184,16 @@ def GenBinaryData(BinaryData, BinaryObj, BinariesDict, AsBuildIns, BinaryFileObj
else:
TagName = ''
Family = ''
-
+
FFE = ItemObj.GetFeatureFlagExp()
#
# If have architecturie specified, then use the specified architecturie;
# If the section tag does not have an architecture modifier or the modifier is "common" (case in-sensitive),
- # and the VALID_ARCHITECTURES comment exists, the list from the VALID_ARCHITECTURES comment
+ # and the VALID_ARCHITECTURES comment exists, the list from the VALID_ARCHITECTURES comment
# can be used for the attribute.
# If both not have VALID_ARCHITECTURE comment and no architecturie specified, then keep it empty.
- #
+ #
SupArchList = sorted(ConvertArchList(ItemObj.GetSupArchList()))
if len(SupArchList) == 1 and SupArchList[0] == 'COMMON':
if not (len(OriSupArchList) == 1 or OriSupArchList[0] == 'COMMON'):
@@ -208,7 +208,7 @@ def GenBinaryData(BinaryData, BinaryObj, BinariesDict, AsBuildIns, BinaryFileObj
#
# Get GUID value of the GUID CName in the DEC file
#
- if ItemObj.GetType() == DT.SUBTYPE_GUID_BINARY_FILE_TYPE:
+ if ItemObj.GetType() == DT.SUBTYPE_GUID_BINARY_FILE_TYPE:
if not CheckGuidRegFormat(ItemObj.GetGuidValue()):
if not DecObjList:
if DT.TAB_HORIZON_LINE_SPLIT in ItemObj.GetGuidValue() or \
@@ -231,12 +231,12 @@ def GenBinaryData(BinaryData, BinaryObj, BinariesDict, AsBuildIns, BinaryFileObj
FileNameObj.SetGuidValue(GuidObj.GetGuid())
break
- if not FileNameObj.GetGuidValue():
+ if not FileNameObj.GetGuidValue():
Logger.Error("\nMkPkg",
FORMAT_INVALID,
ST.ERR_DECPARSE_CGUID_NOT_FOUND % \
(ItemObj.GetGuidValue()),
- RaiseError=True)
+ RaiseError=True)
else:
FileNameObj.SetGuidValue(ItemObj.GetGuidValue().strip())
diff --git a/BaseTools/Source/Python/UPT/PomAdapter/__init__.py b/BaseTools/Source/Python/UPT/PomAdapter/__init__.py
index e477b97baa..f54832683d 100644
--- a/BaseTools/Source/Python/UPT/PomAdapter/__init__.py
+++ b/BaseTools/Source/Python/UPT/PomAdapter/__init__.py
@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
PomAdapter
-'''
\ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/UPT/ReplacePkg.py b/BaseTools/Source/Python/UPT/ReplacePkg.py
index 6f52b4f8f8..9531eb53e9 100644
--- a/BaseTools/Source/Python/UPT/ReplacePkg.py
+++ b/BaseTools/Source/Python/UPT/ReplacePkg.py
@@ -1,11 +1,11 @@
## @file
# Replace distribution package.
#
-# Copyright (c) 2014 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -51,26 +51,26 @@ def Main(Options = None):
WorkspaceDir = GlobalData.gWORKSPACE
Dep = DependencyRules(DataBase)
DistPkg, ContentZipFile, DpPkgFileName, DistFile = UnZipDp(WorkspaceDir, Options.PackFileToReplace)
-
+
StoredDistFile, OrigDpGuid, OrigDpVersion = GetInstalledDpInfo(Options.PackFileToBeReplaced, \
Dep, DataBase, WorkspaceDir)
-
+
#
# check dependency
#
CheckReplaceDpx(Dep, DistPkg, OrigDpGuid, OrigDpVersion)
-
+
#
# Remove the old distribution
#
RemoveDist(OrigDpGuid, OrigDpVersion, StoredDistFile, DataBase, WorkspaceDir, Options.Yes)
-
+
#
# Install the new distribution
#
InstallDp(DistPkg, DpPkgFileName, ContentZipFile, Options, Dep, WorkspaceDir, DataBase)
ReturnCode = 0
-
+
except FatalError as XExcept:
ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
@@ -102,11 +102,11 @@ def Main(Options = None):
for TempDir in GlobalData.gUNPACK_DIR:
rmtree(TempDir)
GlobalData.gUNPACK_DIR = []
- Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_DONE)
+ Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_DONE)
if ReturnCode == 0:
Logger.Quiet(ST.MSG_FINISH)
-
+
return ReturnCode
def CheckReplaceDpx(Dep, DistPkg, OrigDpGuid, OrigDpVersion):
@@ -125,18 +125,18 @@ def CheckReplaceDpx(Dep, DistPkg, OrigDpGuid, OrigDpVersion):
if Dep.CheckDpExists(DistPkg.Header.GetGuid(), DistPkg.Header.GetVersion()):
Logger.Error("\nReplacePkg", UPT_ALREADY_INSTALLED_ERROR,
ST.WRN_DIST_PKG_INSTALLED,
- ExtraData=ST.MSG_REPLACE_ALREADY_INSTALLED_DP)
+ ExtraData=ST.MSG_REPLACE_ALREADY_INSTALLED_DP)
#
# check whether the original distribution could be replaced by new distribution
- #
+ #
Logger.Verbose(ST.MSG_CHECK_DP_FOR_REPLACE%(NewDpInfo, OrigDpInfo))
DepInfoResult = Dep.CheckDpDepexForReplace(OrigDpGuid, OrigDpVersion, NewDpPkgList)
Replaceable = DepInfoResult[0]
if not Replaceable:
Logger.Error("\nReplacePkg", UNKNOWN_ERROR,
ST.ERR_PACKAGE_NOT_MATCH_DEPENDENCY)
-
+
#
# check whether new distribution could be installed by dependency rule
#
diff --git a/BaseTools/Source/Python/UPT/RmPkg.py b/BaseTools/Source/Python/UPT/RmPkg.py
index 6427a8f16c..78469dc758 100644
--- a/BaseTools/Source/Python/UPT/RmPkg.py
+++ b/BaseTools/Source/Python/UPT/RmPkg.py
@@ -1,11 +1,11 @@
## @file
# Install distribution package.
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -58,7 +58,7 @@ def CheckDpDepex(Dep, Guid, Version, WorkspaceDir):
return 1
else:
#
- # report list of modules that are not valid due to force
+ # report list of modules that are not valid due to force
# remove,
# also generate a log file for reference
#
@@ -72,12 +72,12 @@ def CheckDpDepex(Dep, Guid, Version, WorkspaceDir):
LogFile.write("%s\n"%ModulePath)
Logger.Info(ModulePath)
except IOError:
- Logger.Warn("\nRmPkg", ST.ERR_FILE_WRITE_FAILURE,
+ Logger.Warn("\nRmPkg", ST.ERR_FILE_WRITE_FAILURE,
File=LogFilePath)
except IOError:
- Logger.Warn("\nRmPkg", ST.ERR_FILE_OPEN_FAILURE,
+ Logger.Warn("\nRmPkg", ST.ERR_FILE_OPEN_FAILURE,
File=LogFilePath)
- finally:
+ finally:
LogFile.close()
## Remove Path
@@ -85,7 +85,7 @@ def CheckDpDepex(Dep, Guid, Version, WorkspaceDir):
# removing readonly file on windows will get "Access is denied"
# error, so before removing, change the mode to be writeable
#
-# @param Path: The Path to be removed
+# @param Path: The Path to be removed
#
def RemovePath(Path):
Logger.Info(ST.MSG_REMOVE_FILE % Path)
@@ -122,43 +122,43 @@ def GetCurrentFileList(DataBase, Guid, Version, WorkspaceDir):
# If no error found, return zero value so the caller of this tool can know
# if it's executed successfully or not.
#
-# @param Options: command option
+# @param Options: command option
#
def Main(Options = None):
try:
- DataBase = GlobalData.gDB
+ DataBase = GlobalData.gDB
if not Options.DistributionFile:
- Logger.Error("RmPkg",
- OPTION_MISSING,
+ Logger.Error("RmPkg",
+ OPTION_MISSING,
ExtraData=ST.ERR_SPECIFY_PACKAGE)
WorkspaceDir = GlobalData.gWORKSPACE
#
# Prepare check dependency
#
Dep = DependencyRules(DataBase)
-
+
#
# Get the Dp information
#
StoredDistFile, Guid, Version = GetInstalledDpInfo(Options.DistributionFile, Dep, DataBase, WorkspaceDir)
- #
+ #
# Check Dp depex
#
CheckDpDepex(Dep, Guid, Version, WorkspaceDir)
- #
+ #
# remove distribution
#
RemoveDist(Guid, Version, StoredDistFile, DataBase, WorkspaceDir, Options.Yes)
Logger.Quiet(ST.MSG_FINISH)
-
+
ReturnCode = 0
-
+
except FatalError as XExcept:
- ReturnCode = XExcept.args[0]
+ ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + \
format_exc())
@@ -203,7 +203,7 @@ def GetInstalledDpInfo(DistributionFile, Dep, DataBase, WorkspaceDir):
if not Dep.CheckDpExists(Guid, Version):
Logger.Error("RmPkg", UNKNOWN_ERROR, ST.ERR_DISTRIBUTION_NOT_INSTALLED)
#
- # Check for Distribution files existence in /conf/upt, if not exist,
+ # Check for Distribution files existence in /conf/upt, if not exist,
# Warn user and go on.
#
StoredDistFile = os.path.normpath(os.path.join(WorkspaceDir, GlobalData.gUPT_DIR, NewDpFileName))
@@ -252,7 +252,7 @@ def RemoveDist(Guid, Version, StoredDistFile, DataBase, WorkspaceDir, ForceRemov
RemovePath(Path)
else:
MissingFileList.append(Path)
-
+
for Path in NewFileList:
if os.path.isfile(Path):
if (not ForceRemove) and (not os.path.split(Path)[1].startswith('.')):
diff --git a/BaseTools/Source/Python/UPT/UPT.py b/BaseTools/Source/Python/UPT/UPT.py
index 772974199f..004fc5ff2f 100644
--- a/BaseTools/Source/Python/UPT/UPT.py
+++ b/BaseTools/Source/Python/UPT/UPT.py
@@ -1,12 +1,12 @@
## @file
#
-# This file is the main entry for UPT
+# This file is the main entry for UPT
#
-# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
diff --git a/BaseTools/Source/Python/UPT/UnitTest/CommentGeneratingUnitTest.py b/BaseTools/Source/Python/UPT/UnitTest/CommentGeneratingUnitTest.py
index 9c50d2dc60..3371ec3c2f 100644
--- a/BaseTools/Source/Python/UPT/UnitTest/CommentGeneratingUnitTest.py
+++ b/BaseTools/Source/Python/UPT/UnitTest/CommentGeneratingUnitTest.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -15,19 +15,19 @@ import os
import unittest
import Logger.Log as Logger
-from GenMetaFile.GenInfFile import GenGuidSections
+from GenMetaFile.GenInfFile import GenGuidSections
from GenMetaFile.GenInfFile import GenProtocolPPiSections
from GenMetaFile.GenInfFile import GenPcdSections
from GenMetaFile.GenInfFile import GenSpecialSections
from Library.CommentGenerating import GenGenericCommentF
-from Library.CommentGenerating import _GetHelpStr
+from Library.CommentGenerating import _GetHelpStr
from Object.POM.CommonObject import TextObject
from Object.POM.CommonObject import GuidObject
from Object.POM.CommonObject import ProtocolObject
from Object.POM.CommonObject import PpiObject
from Object.POM.CommonObject import PcdObject
from Object.POM.ModuleObject import HobObject
-
+
from Library.StringUtils import GetSplitValueList
from Library.DataType import TAB_SPACE_SPLIT
from Library.DataType import TAB_LANGUAGE_EN_US
@@ -46,7 +46,7 @@ class _GetHelpStrTest(unittest.TestCase):
def tearDown(self):
pass
-
+
#
# Normal case1: have one help text object with Lang = 'en-US'
#
@@ -55,11 +55,11 @@ class _GetHelpStrTest(unittest.TestCase):
HelpTextObj = TextObject()
HelpTextObj.SetLang(TAB_LANGUAGE_EN_US)
HelpTextObj.SetString(HelpStr)
-
+
HelpTextList = [HelpTextObj]
Result = _GetHelpStr(HelpTextList)
self.assertEqual(Result, HelpStr)
-
+
#
# Normal case2: have two help text object with Lang = 'en-US' and other
#
@@ -68,16 +68,16 @@ class _GetHelpStrTest(unittest.TestCase):
HelpTextObj = TextObject()
HelpTextObj.SetLang(TAB_LANGUAGE_ENG)
HelpTextObj.SetString(HelpStr)
-
+
HelpTextList = [HelpTextObj]
ExpectedStr = 'Hello world1'
HelpTextObj = TextObject()
HelpTextObj.SetLang(TAB_LANGUAGE_EN_US)
- HelpTextObj.SetString(ExpectedStr)
-
+ HelpTextObj.SetString(ExpectedStr)
+
HelpTextList.append(HelpTextObj)
-
+
Result = _GetHelpStr(HelpTextList)
self.assertEqual(Result, ExpectedStr)
@@ -89,16 +89,16 @@ class _GetHelpStrTest(unittest.TestCase):
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
-
+
HelpTextList = [HelpTextObj]
ExpectedStr = 'Hello world1'
HelpTextObj = TextObject()
HelpTextObj.SetLang(TAB_LANGUAGE_ENG)
- HelpTextObj.SetString(ExpectedStr)
-
+ HelpTextObj.SetString(ExpectedStr)
+
HelpTextList.append(HelpTextObj)
-
+
Result = _GetHelpStr(HelpTextList)
self.assertEqual(Result, ExpectedStr)
@@ -110,15 +110,15 @@ class _GetHelpStrTest(unittest.TestCase):
ExpectedStr = 'Hello world1'
HelpTextObj = TextObject()
HelpTextObj.SetLang(TAB_LANGUAGE_ENG)
- HelpTextObj.SetString(ExpectedStr)
+ HelpTextObj.SetString(ExpectedStr)
HelpTextList = [HelpTextObj]
-
+
HelpStr = 'Hello world'
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
- HelpTextObj.SetString(HelpStr)
+ HelpTextObj.SetString(HelpStr)
HelpTextList.append(HelpTextObj)
-
+
Result = _GetHelpStr(HelpTextList)
self.assertEqual(Result, ExpectedStr)
@@ -130,27 +130,27 @@ class _GetHelpStrTest(unittest.TestCase):
ExpectedStr = 'Hello world1'
HelpTextObj = TextObject()
HelpTextObj.SetLang(TAB_LANGUAGE_EN_US)
- HelpTextObj.SetString(ExpectedStr)
+ HelpTextObj.SetString(ExpectedStr)
HelpTextList = [HelpTextObj]
-
+
HelpStr = 'Hello unknown world'
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
- HelpTextObj.SetString(HelpStr)
+ HelpTextObj.SetString(HelpStr)
HelpTextList.append(HelpTextObj)
HelpStr = 'Hello mysterious world'
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
- HelpTextObj.SetString(HelpStr)
+ HelpTextObj.SetString(HelpStr)
HelpTextList.append(HelpTextObj)
-
+
Result = _GetHelpStr(HelpTextList)
self.assertEqual(Result, ExpectedStr)
-
+
HelpTextList.sort()
self.assertEqual(Result, ExpectedStr)
-
+
HelpTextList.sort(reverse=True)
self.assertEqual(Result, ExpectedStr)
@@ -160,15 +160,15 @@ class _GetHelpStrTest(unittest.TestCase):
#
class GenGuidSectionsTest(unittest.TestCase):
def setUp(self):
- pass
+ pass
def tearDown(self):
pass
-
+
#
# This is the API to generate Guid Object to help UnitTest
#
- def GuidFactory(self, CName, FFE, Usage, GuidType, VariableName, HelpStr):
+ def GuidFactory(self, CName, FFE, Usage, GuidType, VariableName, HelpStr):
Guid = GuidObject()
Guid.SetCName(CName)
Guid.SetFeatureFlag(FFE)
@@ -180,22 +180,22 @@ class GenGuidSectionsTest(unittest.TestCase):
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
Guid.SetHelpTextList([HelpTextObj])
-
- return Guid
-
+
+ return Guid
+
#
# Normal case: have two GuidObject
#
def testNormalCase1(self):
GuidList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 1'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
@@ -205,10 +205,10 @@ class GenGuidSectionsTest(unittest.TestCase):
GuidType = 'Variable'
VariableName = ''
HelpStr = 'Usage comment line 2'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
- GuidList.append(Guid1)
-
+ GuidList.append(Guid1)
+
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
## PRODUCES ## Event # Usage comment line 1
@@ -221,14 +221,14 @@ Guid1|FFE1'''
#
def testNormalCase2(self):
GuidList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 1'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
@@ -238,10 +238,10 @@ Guid1|FFE1'''
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = 'Generic comment line 1\n Generic comment line 2'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
- GuidList.append(Guid1)
-
+ GuidList.append(Guid1)
+
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
## PRODUCES ## Event # Usage comment line 1
@@ -252,7 +252,7 @@ Guid1|FFE1'''
self.assertEqual(Result.strip(), Expected)
#
- # Normal case: have two GuidObject, one help goes to generic help,
+ # Normal case: have two GuidObject, one help goes to generic help,
# the other go into usage comment
#
def testNormalCase3(self):
@@ -264,20 +264,20 @@ Guid1|FFE1'''
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = 'Generic comment'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
- GuidList.append(Guid1)
-
+ GuidList.append(Guid1)
+
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 1'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
-
+
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
# Generic comment
@@ -298,10 +298,10 @@ Guid1|FFE1'''
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = 'Generic comment line1 \n generic comment line 2'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
- GuidList.append(Guid1)
-
+ GuidList.append(Guid1)
+
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
# Generic comment line1
@@ -315,17 +315,17 @@ Guid1|FFE1'''
#
def testNormalCase6(self):
GuidList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 1\n Usage comment line 2'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
-
+
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
Guid1|FFE1 ## PRODUCES ## Event # Usage comment line 1 Usage comment line 2
@@ -337,36 +337,36 @@ Guid1|FFE1 ## PRODUCES ## Event # Usage comment line 1 Usage comment line 2
#
def testNormalCase7(self):
GuidList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = 'Usage comment line 1'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
-
+
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
Guid1|FFE1 # Usage comment line 1
'''
self.assertEqual(Result.strip(), Expected.strip())
-
+
#
# Normal case: have two GuidObject
#
def testNormalCase8(self):
GuidList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 1\n Usage comment line 2'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
@@ -376,10 +376,10 @@ Guid1|FFE1 # Usage comment line 1
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 3'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
-
+
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
## PRODUCES ## Event # Usage comment line 1 Usage comment line 2
@@ -403,14 +403,14 @@ Guid1|FFE1
#
def testNormalCase10(self):
GuidList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = ''
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
@@ -425,14 +425,14 @@ Guid1|FFE1
#
def testNormalCase11(self):
GuidList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = 'general comment line 1'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
@@ -442,7 +442,7 @@ Guid1|FFE1
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 3'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
@@ -452,10 +452,10 @@ Guid1|FFE1
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = 'general comment line 2'
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
-
+
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
# general comment line 1
@@ -470,14 +470,14 @@ Guid1|FFE1
#
def testNormalCase12(self):
GuidList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'GUID'
VariableName = ''
HelpStr = ''
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
@@ -487,7 +487,7 @@ Guid1|FFE1
GuidType = 'Event'
VariableName = ''
HelpStr = ''
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
@@ -497,10 +497,10 @@ Guid1|FFE1
GuidType = 'Event'
VariableName = ''
HelpStr = ''
- Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
-
+
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
## PRODUCES ## GUID
@@ -509,17 +509,17 @@ Guid1|FFE1
Guid1|FFE1
'''
self.assertEqual(Result.strip(), Expected.strip())
-
+
#
# Test GenProtocolPPiSections
#
class GenProtocolPPiSectionsTest(unittest.TestCase):
def setUp(self):
- pass
+ pass
def tearDown(self):
pass
-
+
#
# This is the API to generate Protocol/Ppi Object to help UnitTest
#
@@ -528,7 +528,7 @@ class GenProtocolPPiSectionsTest(unittest.TestCase):
Object = ProtocolObject()
else:
Object = PpiObject()
-
+
Object.SetCName(CName)
Object.SetFeatureFlag(FFE)
Object.SetUsage(Usage)
@@ -538,8 +538,8 @@ class GenProtocolPPiSectionsTest(unittest.TestCase):
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
Object.SetHelpTextList([HelpTextObj])
-
- return Object
+
+ return Object
# Usage Notify Help INF Comment
#1 UNDEFINED true Present ## UNDEFINED ## NOTIFY # Help
@@ -557,19 +557,19 @@ class GenProtocolPPiSectionsTest(unittest.TestCase):
def testNormalCase1(self):
ObjectList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
-
+
Usage = 'UNDEFINED'
Notify = True
HelpStr = 'Help'
IsProtocol = True
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## UNDEFINED ## NOTIFY # Help'''
@@ -577,11 +577,11 @@ Guid1|FFE1 ## UNDEFINED ## NOTIFY # Help'''
IsProtocol = False
ObjectList = []
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Ppis]
Guid1|FFE1 ## UNDEFINED ## NOTIFY # Help'''
@@ -589,19 +589,19 @@ Guid1|FFE1 ## UNDEFINED ## NOTIFY # Help'''
def testNormalCase2(self):
ObjectList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
-
+
Usage = 'UNDEFINED'
Notify = True
HelpStr = ''
IsProtocol = True
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## UNDEFINED ## NOTIFY'''
@@ -609,19 +609,19 @@ Guid1|FFE1 ## UNDEFINED ## NOTIFY'''
def testNormalCase3(self):
ObjectList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
-
+
Usage = 'UNDEFINED'
Notify = False
HelpStr = 'Help'
IsProtocol = True
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## UNDEFINED # Help'''
@@ -629,19 +629,19 @@ Guid1|FFE1 ## UNDEFINED # Help'''
def testNormalCase4(self):
ObjectList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
-
+
Usage = 'UNDEFINED'
Notify = False
HelpStr = ''
IsProtocol = True
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## UNDEFINED'''
@@ -649,19 +649,19 @@ Guid1|FFE1 ## UNDEFINED'''
def testNormalCase5(self):
ObjectList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
-
+
Usage = 'UNDEFINED'
Notify = ''
HelpStr = 'Help'
IsProtocol = True
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 # Help'''
@@ -669,19 +669,19 @@ Guid1|FFE1 # Help'''
def testNormalCase6(self):
ObjectList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
-
+
Usage = 'UNDEFINED'
Notify = ''
HelpStr = ''
IsProtocol = True
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1'''
@@ -689,19 +689,19 @@ Guid1|FFE1'''
def testNormalCase7(self):
ObjectList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
-
+
Usage = 'PRODUCES'
Notify = True
HelpStr = 'Help'
IsProtocol = True
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## PRODUCES ## NOTIFY # Help'''
@@ -709,19 +709,19 @@ Guid1|FFE1 ## PRODUCES ## NOTIFY # Help'''
def testNormalCase8(self):
ObjectList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
-
+
Usage = 'PRODUCES'
Notify = True
HelpStr = ''
IsProtocol = True
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## PRODUCES ## NOTIFY'''
@@ -729,19 +729,19 @@ Guid1|FFE1 ## PRODUCES ## NOTIFY'''
def testNormalCase9(self):
ObjectList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
-
+
Usage = 'PRODUCES'
Notify = False
HelpStr = 'Help'
IsProtocol = True
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## PRODUCES # Help'''
@@ -749,19 +749,19 @@ Guid1|FFE1 ## PRODUCES # Help'''
def testNormalCaseA(self):
ObjectList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
-
+
Usage = 'PRODUCES'
Notify = False
HelpStr = ''
IsProtocol = True
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## PRODUCES'''
@@ -769,19 +769,19 @@ Guid1|FFE1 ## PRODUCES'''
def testNormalCaseB(self):
ObjectList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
-
+
Usage = 'PRODUCES'
Notify = ''
HelpStr = 'Help'
IsProtocol = True
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## PRODUCES # Help'''
@@ -789,19 +789,19 @@ Guid1|FFE1 ## PRODUCES # Help'''
def testNormalCaseC(self):
ObjectList = []
-
+
CName = 'Guid1'
FFE = 'FFE1'
-
+
Usage = 'PRODUCES'
Notify = ''
HelpStr = ''
IsProtocol = True
- Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
-
-
+
+
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## PRODUCES'''
@@ -812,18 +812,18 @@ Guid1|FFE1 ## PRODUCES'''
#
class GenPcdSectionsTest(unittest.TestCase):
def setUp(self):
- pass
+ pass
def tearDown(self):
pass
-
+
#
# This is the API to generate Pcd Object to help UnitTest
#
def ObjectFactory(self, ItemType, TSCName, CName, DValue, FFE, Usage, Str):
Object = PcdObject()
HelpStr = Str
-
+
Object.SetItemType(ItemType)
Object.SetTokenSpaceGuidCName(TSCName)
Object.SetCName(CName)
@@ -835,8 +835,8 @@ class GenPcdSectionsTest(unittest.TestCase):
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
Object.SetHelpTextList([HelpTextObj])
-
- return Object
+
+ return Object
# Usage Help INF Comment
@@ -852,14 +852,14 @@ class GenPcdSectionsTest(unittest.TestCase):
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
-
+
Usage = 'UNDEFINED'
Str = 'Help'
-
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
-
+
Result = GenPcdSections(ObjectList)
Expected = \
'[Pcd]\n' + \
@@ -873,14 +873,14 @@ class GenPcdSectionsTest(unittest.TestCase):
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
-
+
Usage = 'UNDEFINED'
Str = ''
-
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
-
+
Result = GenPcdSections(ObjectList)
Expected = '[Pcd]\nTSCName.CName|DValue|FFE'
self.assertEqual(Result.strip(), Expected)
@@ -892,14 +892,14 @@ class GenPcdSectionsTest(unittest.TestCase):
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
-
+
Usage = 'CONSUMES'
Str = 'Help'
-
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
-
+
Result = GenPcdSections(ObjectList)
Expected = '[Pcd]\nTSCName.CName|DValue|FFE ## CONSUMES # Help'
self.assertEqual(Result.strip(), Expected)
@@ -911,14 +911,14 @@ class GenPcdSectionsTest(unittest.TestCase):
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
-
+
Usage = 'CONSUMES'
Str = ''
-
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
-
+
Result = GenPcdSections(ObjectList)
Expected = '[Pcd]\nTSCName.CName|DValue|FFE ## CONSUMES'
self.assertEqual(Result.strip(), Expected)
@@ -933,13 +933,13 @@ class GenPcdSectionsTest(unittest.TestCase):
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
-
+
Usage = 'CONSUMES'
Str = 'commment line 1\ncomment line 2'
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
-
+
Result = GenPcdSections(ObjectList)
Expected = '''[Pcd]
TSCName.CName|DValue|FFE ## CONSUMES # commment line 1 comment line 2'''
@@ -955,19 +955,19 @@ TSCName.CName|DValue|FFE ## CONSUMES # commment line 1 comment line 2'''
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
-
+
Usage = 'UNDEFINED'
Str = 'commment line 1\ncomment line 2'
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Usage = 'UNDEFINED'
Str = 'commment line 3'
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
-
+
Result = GenPcdSections(ObjectList)
Expected = '''[Pcd]
# commment line 1
@@ -986,25 +986,25 @@ TSCName.CName|DValue|FFE'''
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
-
+
Usage = 'UNDEFINED'
Str = 'commment line 1\ncomment line 2'
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Usage = 'CONSUMES'
Str = 'Foo'
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
-
+
Usage = 'UNDEFINED'
Str = 'commment line 3'
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
-
+
Result = GenPcdSections(ObjectList)
Expected = '''[Pcd]
# commment line 1
@@ -1028,13 +1028,13 @@ TSCName.CName|DValue|FFE'''
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
-
+
Usage = 'CONSUMES'
Str = 'commment line 1\ncomment line 2'
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
-
+
Result = GenPcdSections(ObjectList)
Expected = '''[FeaturePcd]
# commment line 1
@@ -1052,13 +1052,13 @@ TSCName.CName|DValue|FFE'''
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
-
+
Usage = 'CONSUMES'
Str = ''
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
-
+
Result = GenPcdSections(ObjectList)
Expected = '''[FeaturePcd]
TSCName.CName|DValue|FFE'''
@@ -1074,13 +1074,13 @@ TSCName.CName|DValue|FFE'''
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
-
+
Usage = 'PRODUCES'
Str = 'commment line 1\ncomment line 2'
- Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
-
+
Result = GenPcdSections(ObjectList)
Expected = '''
@@ -1097,28 +1097,28 @@ TSCName.CName|DValue|FFE
#
class GenHobSectionsTest(unittest.TestCase):
def setUp(self):
- pass
+ pass
def tearDown(self):
pass
-
+
#
# This is the API to generate Event Object to help UnitTest
#
def ObjectFactory(self, SupArchList, Type, Usage, Str):
Object = HobObject()
HelpStr = Str
-
+
Object.SetHobType(Type)
Object.SetUsage(Usage)
Object.SetSupArchList(SupArchList)
-
+
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
Object.SetHelpTextList([HelpTextObj])
-
- return Object
+
+ return Object
def testNormalCase1(self):
ObjectList = []
@@ -1126,10 +1126,10 @@ class GenHobSectionsTest(unittest.TestCase):
Type = 'Foo'
Usage = 'UNDEFINED'
Str = 'Help'
-
+
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
-
+
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
@@ -1147,10 +1147,10 @@ class GenHobSectionsTest(unittest.TestCase):
Type = 'Foo'
Usage = 'UNDEFINED'
Str = 'Help'
-
+
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
-
+
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob]
# ##
@@ -1161,17 +1161,17 @@ class GenHobSectionsTest(unittest.TestCase):
#
'''
self.assertEqual(Result, Expected)
-
+
def testNormalCase3(self):
ObjectList = []
SupArchList = ['X64']
Type = 'Foo'
Usage = 'UNDEFINED'
Str = '\nComment Line 1\n\n'
-
+
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
-
+
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
@@ -1189,10 +1189,10 @@ class GenHobSectionsTest(unittest.TestCase):
Type = 'Foo'
Usage = 'UNDEFINED'
Str = '\nComment Line 1\n'
-
+
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
-
+
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
@@ -1210,10 +1210,10 @@ class GenHobSectionsTest(unittest.TestCase):
Type = 'Foo'
Usage = 'UNDEFINED'
Str = 'Comment Line 1\n\n'
-
+
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
-
+
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
@@ -1231,17 +1231,17 @@ class GenHobSectionsTest(unittest.TestCase):
Type = 'Foo'
Usage = 'UNDEFINED'
Str = ''
-
+
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
-
+
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# Foo ## UNDEFINED
#
#
'''
- self.assertEqual(Result, Expected)
+ self.assertEqual(Result, Expected)
def testNormalCase7(self):
ObjectList = []
@@ -1250,10 +1250,10 @@ class GenHobSectionsTest(unittest.TestCase):
Usage = 'UNDEFINED'
Str = '\nNew Stack HoB'
-
+
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
-
+
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
@@ -1272,10 +1272,10 @@ class GenHobSectionsTest(unittest.TestCase):
Usage = 'UNDEFINED'
Str = '\nNew Stack HoB\n\nTail Comment'
-
+
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
-
+
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
@@ -1287,7 +1287,7 @@ class GenHobSectionsTest(unittest.TestCase):
#
#
'''
- self.assertEqual(Result, Expected)
+ self.assertEqual(Result, Expected)
def testNormalCase9(self):
ObjectList = []
@@ -1296,10 +1296,10 @@ class GenHobSectionsTest(unittest.TestCase):
Usage = 'UNDEFINED'
Str = '\n\n'
-
+
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
-
+
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
@@ -1309,7 +1309,7 @@ class GenHobSectionsTest(unittest.TestCase):
#
#
'''
- self.assertEqual(Result, Expected)
+ self.assertEqual(Result, Expected)
def testNormalCase10(self):
ObjectList = []
@@ -1320,7 +1320,7 @@ class GenHobSectionsTest(unittest.TestCase):
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
-
+
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
@@ -1330,7 +1330,7 @@ class GenHobSectionsTest(unittest.TestCase):
#
#
'''
- self.assertEqual(Result, Expected)
+ self.assertEqual(Result, Expected)
def testNormalCase11(self):
ObjectList = []
@@ -1341,7 +1341,7 @@ class GenHobSectionsTest(unittest.TestCase):
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
-
+
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
@@ -1351,7 +1351,7 @@ class GenHobSectionsTest(unittest.TestCase):
#
#
'''
- self.assertEqual(Result, Expected)
+ self.assertEqual(Result, Expected)
def testNormalCase12(self):
ObjectList = []
@@ -1362,7 +1362,7 @@ class GenHobSectionsTest(unittest.TestCase):
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
-
+
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
@@ -1373,18 +1373,18 @@ class GenHobSectionsTest(unittest.TestCase):
#
#
'''
- self.assertEqual(Result, Expected)
-
+ self.assertEqual(Result, Expected)
+
#
# Test GenGenericCommentF
#
class GenGenericCommentFTest(unittest.TestCase):
def setUp(self):
- pass
+ pass
def tearDown(self):
pass
-
+
def testNormalCase1(self):
CommentLines = 'Comment Line 1'
Result = GenGenericCommentF(CommentLines)
@@ -1395,26 +1395,26 @@ class GenGenericCommentFTest(unittest.TestCase):
CommentLines = '\n'
Result = GenGenericCommentF(CommentLines)
Expected = '#\n'
- self.assertEqual(Result, Expected)
+ self.assertEqual(Result, Expected)
def testNormalCase3(self):
CommentLines = '\n\n\n'
Result = GenGenericCommentF(CommentLines)
Expected = '#\n#\n#\n'
- self.assertEqual(Result, Expected)
+ self.assertEqual(Result, Expected)
def testNormalCase4(self):
CommentLines = 'coment line 1\n'
Result = GenGenericCommentF(CommentLines)
Expected = '# coment line 1\n'
- self.assertEqual(Result, Expected)
-
+ self.assertEqual(Result, Expected)
+
def testNormalCase5(self):
CommentLines = 'coment line 1\n coment line 2\n'
Result = GenGenericCommentF(CommentLines)
Expected = '# coment line 1\n# coment line 2\n'
- self.assertEqual(Result, Expected)
-
+ self.assertEqual(Result, Expected)
+
if __name__ == '__main__':
Logger.Initialize()
- unittest.main() \ No newline at end of file
+ unittest.main()
diff --git a/BaseTools/Source/Python/UPT/UnitTest/CommentParsingUnitTest.py b/BaseTools/Source/Python/UPT/UnitTest/CommentParsingUnitTest.py
index 4593506e58..e719567f99 100644
--- a/BaseTools/Source/Python/UPT/UnitTest/CommentParsingUnitTest.py
+++ b/BaseTools/Source/Python/UPT/UnitTest/CommentParsingUnitTest.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -32,7 +32,7 @@ class ParseHeaderCommentSectionTest(unittest.TestCase):
def tearDown(self):
pass
-
+
#
# Normal case1: have license/copyright/license above @file
#
@@ -42,36 +42,36 @@ class ParseHeaderCommentSectionTest(unittest.TestCase):
# License2
#
## @file
- # example abstract
- #
+ # example abstract
+ #
# example description
- #
- # Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
- #
- # License3
+ #
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ #
+ # License3
#'''
-
+
CommentList = GetSplitValueList(TestCommentLines1, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
-
+
ExpectedAbstract = 'example abstract'
self.assertEqual(Abstract, ExpectedAbstract)
-
+
ExpectedDescription = 'example description'
self.assertEqual(Description, ExpectedDescription)
-
+
ExpectedCopyright = \
'Copyright (c) 2007 - 2010,'\
' Intel Corporation. All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
-
+
ExpectedLicense = 'License1\nLicense2\n\nLicense3'
self.assertEqual(License, ExpectedLicense)
@@ -84,41 +84,41 @@ class ParseHeaderCommentSectionTest(unittest.TestCase):
# License2
#
## @file
- # example abstract
- #
+ # example abstract
+ #
# example description
#
- #Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+ #Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
##'''
-
+
CommentList = GetSplitValueList(TestCommentLines2, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
-
+
ExpectedAbstract = 'example abstract'
self.assertEqual(Abstract, ExpectedAbstract)
-
+
ExpectedDescription = 'example description'
self.assertEqual(Description, ExpectedDescription)
-
+
ExpectedCopyright = \
- 'Copyright (c) 2007 - 2010, Intel Corporation.'\
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
-
+
ExpectedLicense = 'License1\nLicense2'
self.assertEqual(License, ExpectedLicense)
-
+
#
- # Normal case2: have license/copyright/license above @file,
+ # Normal case2: have license/copyright/license above @file,
# but no abstract/description
#
def testNormalCase3(self):
@@ -126,41 +126,41 @@ class ParseHeaderCommentSectionTest(unittest.TestCase):
''' # License1
# License2
#
- ## @file
- # Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+ ## @file
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License3 Line1
# License3 Line2
##'''
-
+
CommentList = GetSplitValueList(TestCommentLines3, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
-
+
ExpectedAbstract = ''
self.assertEqual(Abstract, ExpectedAbstract)
-
+
ExpectedDescription = ''
self.assertEqual(Description, ExpectedDescription)
-
+
ExpectedCopyright = \
'Copyright (c) 2007 - 2010,'\
' Intel Corporation. All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
-
+
ExpectedLicense = \
'License1\n' \
'License2\n\n' \
'License3 Line1\n' \
'License3 Line2'
- self.assertEqual(License, ExpectedLicense)
-
+ self.assertEqual(License, ExpectedLicense)
+
#
# Normal case4: format example in spec
#
@@ -172,33 +172,33 @@ class ParseHeaderCommentSectionTest(unittest.TestCase):
#
# Description
#
- # Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License
#
##'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
-
+
ExpectedAbstract = 'Abstract'
self.assertEqual(Abstract, ExpectedAbstract)
-
+
ExpectedDescription = 'Description'
self.assertEqual(Description, ExpectedDescription)
-
+
ExpectedCopyright = \
- 'Copyright (c) 2007 - 2010, Intel Corporation.'\
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
-
+
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
@@ -214,37 +214,37 @@ class ParseHeaderCommentSectionTest(unittest.TestCase):
#
# Description
#
- # Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
- # other line
- # Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ # other line
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License
#
##'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
-
+
ExpectedAbstract = 'Abstract'
self.assertEqual(Abstract, ExpectedAbstract)
-
+
ExpectedDescription = 'Description'
self.assertEqual(Description, ExpectedDescription)
-
+
ExpectedCopyright = \
- 'Copyright (c) 2007 - 2010, Intel Corporation.'\
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>\n'\
- 'Copyright (c) 2007 - 2010, Intel Corporation.'\
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
-
+
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
@@ -260,39 +260,39 @@ class ParseHeaderCommentSectionTest(unittest.TestCase):
#
# Description
#
- # Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
- # Copyright (c) 2007 - 2010, FOO1 Corporation. All rights reserved.<BR>
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ # Copyright (c) 2007 - 2010, FOO1 Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2010, FOO2 Corporation. All rights reserved.<BR>
#
# License
#
##'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
-
+
ExpectedAbstract = 'Abstract'
self.assertEqual(Abstract, ExpectedAbstract)
-
+
ExpectedDescription = 'Description'
self.assertEqual(Description, ExpectedDescription)
-
+
ExpectedCopyright = \
- 'Copyright (c) 2007 - 2010, Intel Corporation.'\
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2010, FOO1 Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2010, FOO2 Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
-
+
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
@@ -307,39 +307,39 @@ class ParseHeaderCommentSectionTest(unittest.TestCase):
#
# Description
#
- # Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
- # Copyright (c) 2007 - 2010, FOO1 Corporation. All rights reserved.<BR>
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ # Copyright (c) 2007 - 2010, FOO1 Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2010, FOO2 Corporation. All rights reserved.<BR>
#
# License
#
##'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
-
+
ExpectedAbstract = ''
self.assertEqual(Abstract, ExpectedAbstract)
-
+
ExpectedDescription = 'Description'
self.assertEqual(Description, ExpectedDescription)
-
+
ExpectedCopyright = \
- 'Copyright (c) 2007 - 2010, Intel Corporation.'\
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2010, FOO1 Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2010, FOO2 Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
-
+
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
@@ -353,37 +353,37 @@ class ParseHeaderCommentSectionTest(unittest.TestCase):
## @file
# Abstact
#
- # Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License
#
##'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
-
+
ExpectedAbstract = 'Abstact'
self.assertEqual(Abstract, ExpectedAbstract)
-
+
ExpectedDescription = ''
self.assertEqual(Description, ExpectedDescription)
-
+
ExpectedCopyright = \
- 'Copyright (c) 2007 - 2010, Intel Corporation.'\
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
-
+
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
-
+
#
# Error case1: No copyright found
#
@@ -398,18 +398,18 @@ class ParseHeaderCommentSectionTest(unittest.TestCase):
# License
#
##'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
- self.assertRaises(Logger.FatalError,
- ParseHeaderCommentSection,
+
+ self.assertRaises(Logger.FatalError,
+ ParseHeaderCommentSection,
TestCommentLinesList,
- "PhonyFile")
+ "PhonyFile")
#
# Error case2: non-empty non-comment lines passed in
@@ -423,22 +423,22 @@ class ParseHeaderCommentSectionTest(unittest.TestCase):
this is invalid line
# Description
#
- # Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# License
#
##'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
- self.assertRaises(Logger.FatalError,
- ParseHeaderCommentSection,
+
+ self.assertRaises(Logger.FatalError,
+ ParseHeaderCommentSection,
TestCommentLinesList,
- "PhonyFile")
+ "PhonyFile")
#
# Test ParseGenericComment
@@ -449,14 +449,14 @@ class ParseGenericCommentTest(unittest.TestCase):
def tearDown(self):
pass
-
+
#
# Normal case1: one line of comment
#
def testNormalCase1(self):
TestCommentLines = \
'''# hello world'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
@@ -476,17 +476,17 @@ class ParseGenericCommentTest(unittest.TestCase):
TestCommentLines = \
'''## hello world
# second line'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
HelptxtObj = ParseGenericComment(TestCommentLinesList, 'testNormalCase2')
self.failIf(not HelptxtObj)
- self.assertEqual(HelptxtObj.GetString(),
+ self.assertEqual(HelptxtObj.GetString(),
'hello world\n' + 'second line')
self.assertEqual(HelptxtObj.GetLang(), TAB_LANGUAGE_EN_US)
@@ -497,17 +497,17 @@ class ParseGenericCommentTest(unittest.TestCase):
TestCommentLines = \
'''## hello world
This is not comment line'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
HelptxtObj = ParseGenericComment(TestCommentLinesList, 'testNormalCase3')
self.failIf(not HelptxtObj)
- self.assertEqual(HelptxtObj.GetString(),
+ self.assertEqual(HelptxtObj.GetString(),
'hello world\n\n')
self.assertEqual(HelptxtObj.GetLang(), TAB_LANGUAGE_EN_US)
@@ -520,7 +520,7 @@ class ParseDecPcdGenericCommentTest(unittest.TestCase):
def tearDown(self):
pass
-
+
#
# Normal case1: comments with no special comment
#
@@ -528,22 +528,22 @@ class ParseDecPcdGenericCommentTest(unittest.TestCase):
TestCommentLines = \
'''## hello world
# second line'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'testNormalCase1')
self.failIf(not HelpTxt)
self.failIf(PcdErr)
- self.assertEqual(HelpTxt,
+ self.assertEqual(HelpTxt,
'hello world\n' + 'second line')
-
-
+
+
#
# Normal case2: comments with valid list
#
@@ -553,19 +553,19 @@ class ParseDecPcdGenericCommentTest(unittest.TestCase):
# second line
# @ValidList 1, 2, 3
# other line'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpTxt)
self.failIf(not PcdErr)
- self.assertEqual(HelpTxt,
+ self.assertEqual(HelpTxt,
'hello world\n' + 'second line\n' + 'other line')
ExpectedList = GetSplitValueList('1 2 3', TAB_SPACE_SPLIT)
ActualList = [item for item in \
@@ -583,19 +583,19 @@ class ParseDecPcdGenericCommentTest(unittest.TestCase):
# second line
# @ValidRange LT 1 AND GT 2
# other line'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpTxt)
self.failIf(not PcdErr)
- self.assertEqual(HelpTxt,
+ self.assertEqual(HelpTxt,
'hello world\n' + 'second line\n' + 'other line')
self.assertEqual(PcdErr.GetValidValueRange().strip(), 'LT 1 AND GT 2')
self.failIf(PcdErr.GetExpression())
@@ -610,19 +610,19 @@ class ParseDecPcdGenericCommentTest(unittest.TestCase):
# second line
# @Expression LT 1 AND GT 2
# other line'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpTxt)
self.failIf(not PcdErr)
- self.assertEqual(HelpTxt,
+ self.assertEqual(HelpTxt,
'hello world\n' + 'second line\n' + 'other line')
self.assertEqual(PcdErr.GetExpression().strip(), 'LT 1 AND GT 2')
self.failIf(PcdErr.GetValidValueRange())
@@ -634,14 +634,14 @@ class ParseDecPcdGenericCommentTest(unittest.TestCase):
def testNormalCase5(self):
TestCommentLines = \
'''# @Expression LT 1 AND GT 2'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.failIf(HelpTxt)
@@ -649,28 +649,28 @@ class ParseDecPcdGenericCommentTest(unittest.TestCase):
self.assertEqual(PcdErr.GetExpression().strip(), 'LT 1 AND GT 2')
self.failIf(PcdErr.GetValidValueRange())
self.failIf(PcdErr.GetValidValue())
-
+
#
# Normal case6: comments with only generic help text
#
def testNormalCase6(self):
TestCommentLines = \
'''#'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.assertEqual(HelpTxt, '\n')
self.failIf(PcdErr)
-
-
+
+
#
# Error case1: comments with both expression and valid list, use later
# ignore the former and with a warning message
@@ -679,17 +679,17 @@ class ParseDecPcdGenericCommentTest(unittest.TestCase):
TestCommentLines = \
'''## hello world
# second line
- # @ValidList 1, 2, 3
+ # @ValidList 1, 2, 3
# @Expression LT 1 AND GT 2
# other line'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
try:
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
except Logger.FatalError:
@@ -704,26 +704,26 @@ class ParseDecPcdTailCommentTest(unittest.TestCase):
def tearDown(self):
pass
-
+
#
# Normal case1: comments with no SupModeList
#
def testNormalCase1(self):
TestCommentLines = \
'''## #hello world'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpStr)
self.failIf(SupModeList)
- self.assertEqual(HelpStr,
+ self.assertEqual(HelpStr,
'hello world')
#
@@ -732,44 +732,44 @@ class ParseDecPcdTailCommentTest(unittest.TestCase):
def testNormalCase2(self):
TestCommentLines = \
'''## BASE #hello world'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpStr)
self.failIf(not SupModeList)
- self.assertEqual(HelpStr,
+ self.assertEqual(HelpStr,
'hello world')
- self.assertEqual(SupModeList,
+ self.assertEqual(SupModeList,
['BASE'])
-
+
#
# Normal case3: comments with more than one SupMode
#
def testNormalCase3(self):
TestCommentLines = \
'''## BASE UEFI_APPLICATION #hello world'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpStr)
self.failIf(not SupModeList)
- self.assertEqual(HelpStr,
+ self.assertEqual(HelpStr,
'hello world')
- self.assertEqual(SupModeList,
+ self.assertEqual(SupModeList,
['BASE', 'UEFI_APPLICATION'])
#
@@ -778,58 +778,58 @@ class ParseDecPcdTailCommentTest(unittest.TestCase):
def testNormalCase4(self):
TestCommentLines = \
'''## BASE UEFI_APPLICATION'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(HelpStr)
self.failIf(not SupModeList)
- self.assertEqual(SupModeList,
+ self.assertEqual(SupModeList,
['BASE', 'UEFI_APPLICATION'])
#
- # Normal case5: general comments with no supModList, extract from real case
+ # Normal case5: general comments with no supModList, extract from real case
#
def testNormalCase5(self):
TestCommentLines = \
''' # 1 = 128MB, 2 = 256MB, 3 = MAX'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpStr)
- self.assertEqual(HelpStr,
+ self.assertEqual(HelpStr,
'1 = 128MB, 2 = 256MB, 3 = MAX')
self.failIf(SupModeList)
-
+
#
- # Error case2: comments with supModList contains valid and invalid
+ # Error case2: comments with supModList contains valid and invalid
# module type
#
def testErrorCase2(self):
TestCommentLines = \
'''## BASE INVALID_MODULE_TYPE #hello world'''
-
+
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
-
+
try:
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
except Logger.FatalError:
@@ -869,7 +869,7 @@ class _IsCopyrightLineTest(unittest.TestCase):
Line = 'this is not aCopyright ( line'
Result = _IsCopyrightLine(Line)
self.failIf(Result)
-
+
#
# Normal case
#
@@ -917,7 +917,7 @@ class _IsCopyrightLineTest(unittest.TestCase):
Line = 'not a Copyright '
Result = _IsCopyrightLine(Line)
self.failIf(Result)
-
+
if __name__ == '__main__':
Logger.Initialize()
- unittest.main() \ No newline at end of file
+ unittest.main()
diff --git a/BaseTools/Source/Python/UPT/UnitTest/DecParserTest.py b/BaseTools/Source/Python/UPT/UnitTest/DecParserTest.py
index 5f0abcafef..0ea5ea1e0f 100644
--- a/BaseTools/Source/Python/UPT/UnitTest/DecParserTest.py
+++ b/BaseTools/Source/Python/UPT/UnitTest/DecParserTest.py
@@ -1,11 +1,11 @@
## @file
# This file contain unit test for DecParser
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -28,19 +28,19 @@ from Library.ParserValidate import IsValidCFormatGuid
#
def TestToolFuncs():
assert IsValidCArray('{0x1, 0x23}')
-
+
# Empty after comma
assert not IsValidCArray('{0x1, 0x23, }')
-
+
# 0x2345 too long
assert not IsValidCArray('{0x1, 0x2345}')
-
+
# Must end with '}'
assert not IsValidCArray('{0x1, 0x23, ')
-
+
# Whitespace between numbers
assert not IsValidCArray('{0x1, 0x2 3, }')
-
+
assert IsValidPcdDatum('VOID*', '"test"')[0]
assert IsValidPcdDatum('VOID*', 'L"test"')[0]
assert IsValidPcdDatum('BOOLEAN', 'TRUE')[0]
@@ -48,10 +48,10 @@ def TestToolFuncs():
assert IsValidPcdDatum('BOOLEAN', '0')[0]
assert IsValidPcdDatum('BOOLEAN', '1')[0]
assert IsValidPcdDatum('UINT8', '0xab')[0]
-
+
assert not IsValidPcdDatum('UNKNOWNTYPE', '0xabc')[0]
- assert not IsValidPcdDatum('UINT8', 'not number')[0]
-
+ assert not IsValidPcdDatum('UINT8', 'not number')[0]
+
assert( IsValidCFormatGuid('{ 0xfa0b1735 , 0x87a0, 0x4193, {0xb2, 0x66 , 0x53, 0x8c , 0x38, 0xaf, 0x48, 0xce }}'))
assert( not IsValidCFormatGuid('{ 0xfa0b1735 , 0x87a0, 0x4193, {0xb2, 0x66 , 0x53, 0x8c , 0x38, 0xaf, 0x48, 0xce }} 0xaa'))
@@ -60,10 +60,10 @@ def TestTemplate(TestString, TestFunc):
Path = os.path.normpath(Path)
try:
f = open(Path, 'w')
-
+
# Write test string to file
f.write(TestString)
-
+
# Close file
f.close()
except:
@@ -72,7 +72,7 @@ def TestTemplate(TestString, TestFunc):
# Call test function to test
Ret = TestFunc(Path, TestString)
-
+
# Test done, remove temporary file
os.remove(Path)
return Ret
@@ -112,13 +112,13 @@ def TestDecDefine():
assert DefObj.GetPackageName() == 'MdePkg'
assert DefObj.GetPackageGuid() == '1E73767F-8F52-4603-AEB4-F29B510B6766'
assert DefObj.GetPackageVersion() == '1.02'
-
+
TestString = '''
[Defines]
UNKNOW_KEY = 0x00010005 # A unknown key
'''
assert TestTemplate(TestString, TestError)
-
+
TestString = '''
[Defines]
PACKAGE_GUID = F-8F52-4603-AEB4-F29B510B6766 # Error GUID
@@ -138,24 +138,24 @@ def TestDecInclude():
[Includes.IA32]
Include/Ia32
'''
-
+
# Create directory in current directory
try:
os.makedirs('Include/Ia32')
except:
pass
Parser = TestTemplate(TestString, TestOK)
-
+
IncObj = Parser.GetIncludeSectionObject()
Items = IncObj.GetIncludes()
assert len(Items) == 1
assert Items[0].File == 'Include'
-
+
Items = IncObj.GetIncludes('IA32')
assert len(Items) == 1
# normpath is called in DEC parser so '/' is converted to '\'
assert Items[0].File == 'Include\\Ia32'
-
+
TestString = '''
[Defines]
DEC_SPECIFICATION = 0x00010005
@@ -166,7 +166,7 @@ def TestDecInclude():
Include_not_exist # directory does not exist
'''
assert TestTemplate(TestString, TestError)
-
+
os.removedirs('Include/Ia32')
def TestDecGuidPpiProtocol():
@@ -196,14 +196,14 @@ def TestDecGuidPpiProtocol():
assert len(Items) == 1
assert Items[0].GuidCName == 'gEfiGlobalVariableGuid'
assert Items[0].GuidCValue == '{ 0x8BE4DF61, 0x93CA, 0x11D2, { 0xAA, 0x0D, 0x00, 0xE0, 0x98, 0x03, 0x2B, 0x8C }}'
-
+
Obj = Parser.GetProtocolSectionObject()
Items = Obj.GetProtocols()
assert Obj.GetSectionName() == 'Protocols'.upper()
assert len(Items) == 1
assert Items[0].GuidCName == 'gEfiBdsArchProtocolGuid'
assert Items[0].GuidCValue == '{ 0x665E3FF6, 0x46CC, 0x11D4, { 0x9A, 0x38, 0x00, 0x90, 0x27, 0x3F, 0xC1, 0x4D }}'
-
+
Obj = Parser.GetPpiSectionObject()
Items = Obj.GetPpis()
assert Obj.GetSectionName() == 'Ppis'.upper()
@@ -221,19 +221,19 @@ def TestDecPcd():
[PcdsFeatureFlag]
## If TRUE, the component name protocol will not be installed.
gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d
-
+
[PcdsFixedAtBuild]
## Indicates the maximum length of unicode string
gEfiMdePkgTokenSpaceGuid.PcdMaximumUnicodeStringLength|1000000|UINT32|0x00000001
-
+
[PcdsFixedAtBuild.IPF]
## The base address of IO port space for IA64 arch
gEfiMdePkgTokenSpaceGuid.PcdIoBlockBaseAddressForIpf|0x0ffffc000000|UINT64|0x0000000f
-
+
[PcdsFixedAtBuild,PcdsPatchableInModule]
## This flag is used to control the printout of DebugLib
gEfiMdePkgTokenSpaceGuid.PcdDebugPrintErrorLevel|0x80000000|UINT32|0x00000006
-
+
[PcdsFixedAtBuild,PcdsPatchableInModule,PcdsDynamic]
## This value is used to set the base address of pci express hierarchy
gEfiMdePkgTokenSpaceGuid.PcdPciExpressBaseAddress|0xE0000000|UINT64|0x0000000a
@@ -247,7 +247,7 @@ def TestDecPcd():
assert Items[0].DefaultValue == 'FALSE'
assert Items[0].DatumType == 'BOOLEAN'
assert Items[0].TokenValue == '0x0000000d'
-
+
Items = Obj.GetPcdsByType('PcdsFixedAtBuild')
assert len(Items) == 4
assert len(Obj.GetPcdsByType('PcdsPatchableInModule')) == 2
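Aside, for readers skimming the TestDecPcd hunks above: every PCD declaration they exercise follows the four-field form TokenSpaceGuid.PcdCName|DefaultValue|DatumType|TokenValue. The snippet below is only an illustrative sketch of that split under the assumption of a well-formed line; the helper name split_dec_pcd is hypothetical and this is not the DecParser code under test.

# Illustrative sketch only -- not the DecParser implementation under test.
# It mirrors the field layout exercised by TestDecPcd above:
#   TokenSpaceGuid.PcdCName|DefaultValue|DatumType|TokenValue
def split_dec_pcd(line):
    fields = [part.strip() for part in line.split('|')]
    token_space_guid, pcd_cname = fields[0].split('.')
    default_value, datum_type, token_value = fields[1], fields[2], fields[3]
    return token_space_guid, pcd_cname, default_value, datum_type, token_value

print(split_dec_pcd('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d'))
# ('gEfiMdePkgTokenSpaceGuid', 'PcdComponentNameDisable', 'FALSE', 'BOOLEAN', '0x0000000d')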
diff --git a/BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py b/BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py
index 2f4917525b..afea4a438b 100644
--- a/BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py
+++ b/BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py
@@ -1,11 +1,11 @@
## @file
# This file contain unit test for DecParser
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -24,7 +24,7 @@ from Parser.DecParser import \
FileContent, \
_DecBase, \
CleanString
-
+
from Object.Parser.DecObject import _DecComments
#
@@ -35,19 +35,19 @@ class CleanStringTestCase(unittest.TestCase):
Line, Comment = CleanString('')
self.assertEqual(Line, '')
self.assertEqual(Comment, '')
-
+
Line, Comment = CleanString('line without comment')
self.assertEqual(Line, 'line without comment')
self.assertEqual(Comment, '')
-
+
Line, Comment = CleanString('# pure comment')
self.assertEqual(Line, '')
self.assertEqual(Comment, '# pure comment')
-
+
Line, Comment = CleanString('line # and comment')
self.assertEqual(Line, 'line')
self.assertEqual(Comment, '# and comment')
-
+
def testCleanStringCpp(self):
Line, Comment = CleanString('line // and comment', AllowCppStyleComment = True)
self.assertEqual(Line, 'line')
@@ -59,16 +59,16 @@ class CleanStringTestCase(unittest.TestCase):
class MacroParserTestCase(unittest.TestCase):
def setUp(self):
self.dec = _DecBase(FileContent('dummy', []))
-
+
def testCorrectMacro(self):
self.dec._MacroParser('DEFINE MARCRO1 = test1')
self.failIf('MARCRO1' not in self.dec._LocalMacro)
self.assertEqual(self.dec._LocalMacro['MARCRO1'], 'test1')
-
+
def testErrorMacro1(self):
# Raise fatal error, macro name must be upper case letter
self.assertRaises(FatalError, self.dec._MacroParser, 'DEFINE not_upper_case = test2')
-
+
def testErrorMacro2(self):
# No macro name given
self.assertRaises(FatalError, self.dec._MacroParser, 'DEFINE ')
@@ -81,19 +81,19 @@ class TryBackSlashTestCase(unittest.TestCase):
Content = [
# Right case
'test no backslash',
-
+
'test with backslash \\',
'continue second line',
-
+
# Do not precede with whitespace
'\\',
-
+
# Empty line after backlash is not allowed
'line with backslash \\',
''
]
self.dec = _DecBase(FileContent('dummy', Content))
-
+
def testBackSlash(self):
#
# Right case, assert return values
@@ -101,11 +101,11 @@ class TryBackSlashTestCase(unittest.TestCase):
ConcatLine, CommentList = self.dec._TryBackSlash(self.dec._RawData.GetNextLine(), [])
self.assertEqual(ConcatLine, 'test no backslash')
self.assertEqual(CommentList, [])
-
+
ConcatLine, CommentList = self.dec._TryBackSlash(self.dec._RawData.GetNextLine(), [])
self.assertEqual(CommentList, [])
self.assertEqual(ConcatLine, 'test with backslash continue second line')
-
+
#
# Error cases, assert raise exception
#
@@ -130,16 +130,16 @@ class TestInner(_DecBase):
def __init__(self, RawData):
_DecBase.__init__(self, RawData)
self.ItemObject = Data()
-
+
def _StopCurrentParsing(self, Line):
return Line == '[TOP]'
-
+
def _ParseItem(self):
Item = DataItem()
Item.String = self._RawData.CurrentLine
self.ItemObject.ItemList.append(Item)
return Item
-
+
def _TailCommentStrategy(self, Comment):
return Comment.find('@comment') != -1
@@ -148,7 +148,7 @@ class TestTop(_DecBase):
_DecBase.__init__(self, RawData)
# List of Data
self.ItemObject = []
-
+
# Top parser
def _StopCurrentParsing(self, Line):
return False
@@ -159,10 +159,10 @@ class TestTop(_DecBase):
self.ItemObject.append(TestParser.ItemObject)
return TestParser.ItemObject
-class ParseTestCase(unittest.TestCase):
+class ParseTestCase(unittest.TestCase):
def setUp(self):
pass
-
+
def testParse(self):
Content = \
'''# Top comment
@@ -172,26 +172,26 @@ class ParseTestCase(unittest.TestCase):
# sub2 head comment
(test item has head and special tail comment)
# @comment test TailCommentStrategy branch
-
+
(test item has no comment)
-
+
# test NextLine branch
[TOP]
sub-item
'''
dec = TestTop(FileContent('dummy', Content.splitlines()))
dec.Parse()
-
+
# Two sections
self.assertEqual(len(dec.ItemObject), 2)
-
+
data = dec.ItemObject[0]
self.assertEqual(data._HeadComment[0][0], '# Top comment')
self.assertEqual(data._HeadComment[0][1], 1)
-
+
# 3 subitems
self.assertEqual(len(data.ItemList), 3)
-
+
dataitem = data.ItemList[0]
self.assertEqual(dataitem.String, '(test item has both head and tail comment)')
# Comment content
@@ -200,7 +200,7 @@ class ParseTestCase(unittest.TestCase):
# Comment line number
self.assertEqual(dataitem._HeadComment[0][1], 3)
self.assertEqual(dataitem._TailComment[0][1], 4)
-
+
dataitem = data.ItemList[1]
self.assertEqual(dataitem.String, '(test item has head and special tail comment)')
# Comment content
@@ -209,20 +209,20 @@ class ParseTestCase(unittest.TestCase):
# Comment line number
self.assertEqual(dataitem._HeadComment[0][1], 5)
self.assertEqual(dataitem._TailComment[0][1], 7)
-
+
dataitem = data.ItemList[2]
self.assertEqual(dataitem.String, '(test item has no comment)')
# Comment content
self.assertEqual(dataitem._HeadComment, [])
self.assertEqual(dataitem._TailComment, [])
-
+
data = dec.ItemObject[1]
self.assertEqual(data._HeadComment[0][0], '# test NextLine branch')
self.assertEqual(data._HeadComment[0][1], 11)
-
+
# 1 subitems
self.assertEqual(len(data.ItemList), 1)
-
+
dataitem = data.ItemList[0]
self.assertEqual(dataitem.String, 'sub-item')
self.assertEqual(dataitem._HeadComment, [])
@@ -241,15 +241,15 @@ class DecDefineTestCase(unittest.TestCase):
item = self.GetObj('PACKAGE_NAME = MdePkg')._ParseItem()
self.assertEqual(item.Key, 'PACKAGE_NAME')
self.assertEqual(item.Value, 'MdePkg')
-
+
def testDecDefine1(self):
obj = self.GetObj('PACKAGE_NAME')
self.assertRaises(FatalError, obj._ParseItem)
-
+
def testDecDefine2(self):
obj = self.GetObj('unknown_key = ')
self.assertRaises(FatalError, obj._ParseItem)
-
+
def testDecDefine3(self):
obj = self.GetObj('PACKAGE_NAME = ')
self.assertRaises(FatalError, obj._ParseItem)
@@ -262,23 +262,23 @@ class DecLibraryTestCase(unittest.TestCase):
Obj = _DecLibraryclass(FileContent('dummy', Content.splitlines()))
Obj._RawData.CurrentLine = Obj._RawData.GetNextLine()
return Obj
-
+
def testNoInc(self):
obj = self.GetObj('UefiRuntimeLib')
self.assertRaises(FatalError, obj._ParseItem)
-
+
def testEmpty(self):
obj = self.GetObj(' | ')
self.assertRaises(FatalError, obj._ParseItem)
-
+
def testLibclassNaming(self):
obj = self.GetObj('lowercase_efiRuntimeLib|Include/Library/UefiRuntimeLib.h')
self.assertRaises(FatalError, obj._ParseItem)
-
+
def testLibclassExt(self):
obj = self.GetObj('RuntimeLib|Include/Library/UefiRuntimeLib.no_h')
self.assertRaises(FatalError, obj._ParseItem)
-
+
def testLibclassRelative(self):
obj = self.GetObj('RuntimeLib|Include/../UefiRuntimeLib.h')
self.assertRaises(FatalError, obj._ParseItem)
@@ -292,7 +292,7 @@ class DecPcdTestCase(unittest.TestCase):
Obj._RawData.CurrentLine = Obj._RawData.GetNextLine()
Obj._RawData.CurrentScope = [('PcdsFeatureFlag'.upper(), 'COMMON')]
return Obj
-
+
def testOK(self):
item = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d')._ParseItem()
self.assertEqual(item.TokenSpaceGuidCName, 'gEfiMdePkgTokenSpaceGuid')
@@ -300,26 +300,26 @@ class DecPcdTestCase(unittest.TestCase):
self.assertEqual(item.DefaultValue, 'FALSE')
self.assertEqual(item.DatumType, 'BOOLEAN')
self.assertEqual(item.TokenValue, '0x0000000d')
-
+
def testNoCvar(self):
obj = self.GetObj('123ai.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d')
self.assertRaises(FatalError, obj._ParseItem)
-
+
def testSplit(self):
obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable FALSE|BOOLEAN|0x0000000d')
self.assertRaises(FatalError, obj._ParseItem)
-
+
obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d | abc')
self.assertRaises(FatalError, obj._ParseItem)
-
+
def testUnknownType(self):
obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|unknown|0x0000000d')
self.assertRaises(FatalError, obj._ParseItem)
-
+
def testVoid(self):
obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|abc|VOID*|0x0000000d')
self.assertRaises(FatalError, obj._ParseItem)
-
+
def testUINT(self):
obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|0xabc|UINT8|0x0000000d')
self.assertRaises(FatalError, obj._ParseItem)
@@ -342,26 +342,26 @@ class DecGuidTestCase(unittest.TestCase):
Obj._RawData.CurrentLine = Obj._RawData.GetNextLine()
Obj._RawData.CurrentScope = [('guids'.upper(), 'COMMON')]
return Obj
-
+
def testCValue(self):
item = self.GetObj('gEfiIpSecProtocolGuid={ 0xdfb386f7, 0xe100, 0x43ad,'
' {0x9c, 0x9a, 0xed, 0x90, 0xd0, 0x8a, 0x5e, 0x12 }}')._ParseItem()
self.assertEqual(item.GuidCName, 'gEfiIpSecProtocolGuid')
self.assertEqual(item.GuidCValue, '{ 0xdfb386f7, 0xe100, 0x43ad, {0x9c, 0x9a, 0xed, 0x90, 0xd0, 0x8a, 0x5e, 0x12 }}')
-
+
def testGuidString(self):
item = self.GetObj('gEfiIpSecProtocolGuid=1E73767F-8F52-4603-AEB4-F29B510B6766')._ParseItem()
self.assertEqual(item.GuidCName, 'gEfiIpSecProtocolGuid')
self.assertEqual(item.GuidCValue, '1E73767F-8F52-4603-AEB4-F29B510B6766')
-
+
def testNoValue1(self):
obj = self.GetObj('gEfiIpSecProtocolGuid')
self.assertRaises(FatalError, obj._ParseItem)
-
+
def testNoValue2(self):
obj = self.GetObj('gEfiIpSecProtocolGuid=')
self.assertRaises(FatalError, obj._ParseItem)
-
+
def testNoName(self):
obj = self.GetObj('=')
self.assertRaises(FatalError, obj._ParseItem)
@@ -376,7 +376,7 @@ class DecDecInitTestCase(unittest.TestCase):
class TmpFile:
def __init__(self, File):
self.File = File
-
+
def Write(self, Content):
try:
FileObj = open(self.File, 'w')
@@ -384,7 +384,7 @@ class TmpFile:
FileObj.close()
except:
pass
-
+
def Remove(self):
try:
os.remove(self.File)
@@ -404,13 +404,13 @@ class DecUESectionTestCase(unittest.TestCase):
[userextensions.intel."myid]
'''
)
-
+
def tearDown(self):
self.File.Remove()
-
+
def testUserExtentionHeader(self):
dec = Dec('test.dec', False)
-
+
# OK: [userextensions.intel."myid"]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
dec._UserExtentionSectionParser()
@@ -419,7 +419,7 @@ class DecUESectionTestCase(unittest.TestCase):
self.assertEqual(dec._RawData.CurrentScope[0][1], 'intel')
self.assertEqual(dec._RawData.CurrentScope[0][2], '"myid"')
self.assertEqual(dec._RawData.CurrentScope[0][3], 'COMMON')
-
+
# OK: [userextensions.intel."myid".IA32]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
dec._UserExtentionSectionParser()
@@ -428,11 +428,11 @@ class DecUESectionTestCase(unittest.TestCase):
self.assertEqual(dec._RawData.CurrentScope[0][1], 'intel')
self.assertEqual(dec._RawData.CurrentScope[0][2], '"myid"')
self.assertEqual(dec._RawData.CurrentScope[0][3], 'IA32')
-
+
# Fail: [userextensions.intel."myid".IA32,]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._UserExtentionSectionParser)
-
+
# Fail: [userextensions.intel."myid]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._UserExtentionSectionParser)
@@ -453,43 +453,43 @@ class DecSectionTestCase(unittest.TestCase):
[Includes, Includes.IA32] # common cannot be with other arch
[Includes.IA32, PcdsFeatureFlag] # different section name
''' )
-
+
def tearDown(self):
self.File.Remove()
-
+
def testSectionHeader(self):
dec = Dec('test.dec', False)
# [no section start or end
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
-
+
#[,] # empty sub-section
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
-
+
# [unknow_section_name]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
-
+
# [Includes.IA32.other] # no third one
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
-
+
# [PcdsFeatureFlag, PcdsFixedAtBuild]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
-
+
# [Includes.IA32, Includes.IA32]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
dec._SectionHeaderParser()
self.assertEqual(len(dec._RawData.CurrentScope), 1)
self.assertEqual(dec._RawData.CurrentScope[0][0], 'Includes'.upper())
self.assertEqual(dec._RawData.CurrentScope[0][1], 'IA32')
-
+
# [Includes, Includes.IA32] # common cannot be with other arch
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
-
+
# [Includes.IA32, PcdsFeatureFlag] # different section name not allowed
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
@@ -511,7 +511,7 @@ class DecDecCommentTestCase(unittest.TestCase):
self.assertEqual(dec._HeadComment[1][0], '##')
self.assertEqual(dec._HeadComment[1][1], 2)
File.Remove()
-
+
def testNoDoubleComment(self):
File = TmpFile('test.dec')
File.Write(
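Before the next file, a short aside on the section-header rules that DecSectionTestCase above walks through. The sketch below restates those rules on their own so the pass/fail cases are easier to follow; it is not Dec._SectionHeaderParser, and the KNOWN_SECTIONS set is a hypothetical subset used only for illustration.

# Standalone restatement of the header rules exercised by DecSectionTestCase
# above; the real checks live in Dec._SectionHeaderParser.
KNOWN_SECTIONS = {'INCLUDES', 'GUIDS', 'PROTOCOLS', 'PPIS', 'LIBRARYCLASSES',
                  'PCDSFEATUREFLAG', 'PCDSFIXEDATBUILD'}

def check_dec_section_header(line):
    line = line.split('#', 1)[0].strip()
    if not (line.startswith('[') and line.endswith(']')):
        return False                      # missing '[' or ']'
    names, archs = set(), []
    for part in line[1:-1].split(','):
        tokens = [t.strip() for t in part.split('.')]
        if not tokens[0] or len(tokens) > 2:
            return False                  # empty sub-section, or a third qualifier
        if tokens[0].upper() not in KNOWN_SECTIONS:
            return False                  # unknown section name
        names.add(tokens[0].upper())
        archs.append(tokens[1].upper() if len(tokens) == 2 else 'COMMON')
    if len(names) > 1:
        return False                      # mixing different section names
    if 'COMMON' in archs and len(set(archs)) > 1:
        return False                      # common cannot share a header with an arch
    return True

assert check_dec_section_header('[Includes.IA32, Includes.IA32]')
assert not check_dec_section_header('[Includes, Includes.IA32]')
assert not check_dec_section_header('[Includes.IA32, PcdsFeatureFlag]')
assert not check_dec_section_header('[Includes.IA32.other]')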
diff --git a/BaseTools/Source/Python/UPT/UnitTest/InfBinarySectionTest.py b/BaseTools/Source/Python/UPT/UnitTest/InfBinarySectionTest.py
index 626f17426d..9a10ec15c4 100644
--- a/BaseTools/Source/Python/UPT/UnitTest/InfBinarySectionTest.py
+++ b/BaseTools/Source/Python/UPT/UnitTest/InfBinarySectionTest.py
@@ -1,11 +1,11 @@
## @file
-# This file contain unit test for Test [Binary] section part of InfParser
+# This file contain unit test for Test [Binary] section part of InfParser
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -49,7 +49,7 @@ GUID | Test/Test.guid | DEBUG
"""
#
-# Have 3 elements, Type | FileName | Target
+# Have 3 elements, Type | FileName | Target
# Target with MACRO defined in [Define] section
#
SectionStringsCommonItem4 = \
@@ -58,7 +58,7 @@ GUID | Test/Test.guid | $(TARGET)
"""
#
-# Have 3 elements, Type | FileName | Target
+# Have 3 elements, Type | FileName | Target
# FileName with MACRO defined in [Binary] section
#
SectionStringsCommonItem5 = \
@@ -115,7 +115,7 @@ GUID | Test/Test.guid | DEBUG | MSFT | TEST | TRUE | OVERFLOW
#-------------start of VER type binary item test input------------------------#
#
-# Has 1 element, error format
+# Has 1 element, error format
#
SectionStringsVerItem1 = \
"""
@@ -199,7 +199,7 @@ UI | Test/Test2.ui | * | FALSE
"""
#
-# Has 1 element, error format
+# Has 1 element, error format
#
SectionStringsUiItem4 = \
"""
@@ -253,7 +253,7 @@ def StringToSectionString(String):
continue
SectionString.append((Line, LineNo, ''))
LineNo = LineNo + 1
-
+
return SectionString
def PrepareTest(String):
@@ -269,7 +269,7 @@ def PrepareTest(String):
#
FileName = os.path.normpath(os.path.realpath(ValueList[1].strip()))
try:
- TempFile = open (FileName, "w")
+ TempFile = open (FileName, "w")
TempFile.close()
except:
print("File Create Error")
@@ -278,24 +278,24 @@ def PrepareTest(String):
CurrentLine.SetLineString(Item[0])
CurrentLine.SetLineNo(Item[1])
InfLineCommentObject = InfLineCommentObject()
-
+
ItemList.append((ValueList, InfLineCommentObject, CurrentLine))
-
+
return ItemList
if __name__ == '__main__':
Logger.Initialize()
-
+
InfBinariesInstance = InfBinariesObject()
ArchList = ['COMMON']
Global.gINF_MODULE_DIR = os.getcwd()
-
+
AllPassedFlag = True
-
+
#
# For All Ui test
#
- UiStringList = [
+ UiStringList = [
SectionStringsUiItem1,
SectionStringsUiItem2,
SectionStringsUiItem3,
@@ -303,10 +303,10 @@ if __name__ == '__main__':
SectionStringsUiItem5,
SectionStringsUiItem6,
SectionStringsUiItem7,
- SectionStringsUiItem8
+ SectionStringsUiItem8
]
-
- for Item in UiStringList:
+
+ for Item in UiStringList:
Ui = PrepareTest(Item)
if Item == SectionStringsUiItem4 or Item == SectionStringsUiItem5:
try:
@@ -314,11 +314,11 @@ if __name__ == '__main__':
except Logger.FatalError:
pass
else:
- try:
+ try:
InfBinariesInstance.SetBinary(Ui = Ui, ArchList = ArchList)
except:
- AllPassedFlag = False
-
+ AllPassedFlag = False
+
#
# For All Ver Test
#
@@ -331,25 +331,25 @@ if __name__ == '__main__':
SectionStringsVerItem6,
SectionStringsVerItem7
]
- for Item in VerStringList:
+ for Item in VerStringList:
Ver = PrepareTest(Item)
if Item == SectionStringsVerItem1 or \
Item == SectionStringsVerItem2:
-
+
try:
InfBinariesInstance.SetBinary(Ver = Ver, ArchList = ArchList)
except:
pass
-
+
else:
try:
InfBinariesInstance.SetBinary(Ver = Ver, ArchList = ArchList)
except:
- AllPassedFlag = False
-
+ AllPassedFlag = False
+
#
# For All Common Test
- #
+ #
CommonStringList = [
SectionStringsCommonItem1,
SectionStringsCommonItem2,
@@ -363,23 +363,23 @@ if __name__ == '__main__':
SectionStringsCommonItem10
]
- for Item in CommonStringList:
+ for Item in CommonStringList:
CommonBin = PrepareTest(Item)
if Item == SectionStringsCommonItem10 or \
Item == SectionStringsCommonItem1:
-
+
try:
InfBinariesInstance.SetBinary(CommonBinary = CommonBin, ArchList = ArchList)
except:
pass
-
+
else:
try:
InfBinariesInstance.SetBinary(Ver = Ver, ArchList = ArchList)
except:
print("Test Failed!")
AllPassedFlag = False
-
+
if AllPassedFlag :
print('All tests passed...')
else:
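A quick orientation note before the XML files: the driver above prepares every test string with StringToSectionString/PrepareTest before handing it to InfBinariesInstance.SetBinary. The sketch below shows roughly the (Line, LineNo, '') tuples that step produces; it is a simplified restatement, and details such as whether blank lines advance the counter may differ from the real helper.

# Rough sketch of the (Line, LineNo, '') tuples produced by the
# StringToSectionString step above; a simplification, not the test code itself.
def string_to_section_string(text):
    section, line_no = [], 1
    for line in text.splitlines():
        stripped = line.strip()
        if stripped:
            section.append((stripped, line_no, ''))
        line_no += 1          # assumption: every line advances the counter
    return section

print(string_to_section_string('\nGUID | Test/Test.guid | DEBUG\n'))
# [('GUID | Test/Test.guid | DEBUG', 2, '')]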
diff --git a/BaseTools/Source/Python/UPT/Xml/CommonXml.py b/BaseTools/Source/Python/UPT/Xml/CommonXml.py
index 8a8cce1696..94fb087d1c 100644
--- a/BaseTools/Source/Python/UPT/Xml/CommonXml.py
+++ b/BaseTools/Source/Python/UPT/Xml/CommonXml.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -123,7 +123,7 @@ class PromptXml(object):
return CreateXmlElement('%s' % Key, Prompt.GetString(), [], [['Lang', Prompt.GetLang()]])
def __str__(self):
return "Prompt = %s Lang = %s" % (self.Prompt, self.Lang)
-
+
##
# HelpTextXml
#
@@ -184,7 +184,7 @@ class HeaderXml(object):
self.CopyrightList.append((HeaderCopyrightLang, XmlElement(SubItem, '%s/Copyright' % Key)))
for SubItem in XmlList(Item, '%s/License' % Key):
HeaderLicenseLang = XmlAttribute(SubItem, 'Lang')
- self.LicenseList.append((HeaderLicenseLang, XmlElement(SubItem, '%s/License' % Key)))
+ self.LicenseList.append((HeaderLicenseLang, XmlElement(SubItem, '%s/License' % Key)))
ModuleHeader = ModuleObject()
ModuleHeader.SetName(self.Name)
ModuleHeader.SetBaseName(self.BaseName)
@@ -379,7 +379,7 @@ class PackageHeaderXml(object):
NodeList = [Element1,
Element2
]
-
+
UNIPackageAbrstractList = []
UNIPackageDescriptionList = []
# Get Abstract and Description from Uni File
@@ -391,11 +391,11 @@ class PackageHeaderXml(object):
if not StringDefClassObject.StringValue:
continue
if StringDefClassObject.StringName == DataType.TAB_DEC_PACKAGE_ABSTRACT:
- UNIPackageAbrstractList.append((GetLanguageCode1766(Lang),
+ UNIPackageAbrstractList.append((GetLanguageCode1766(Lang),
ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
if StringDefClassObject.StringName == DataType.TAB_DEC_PACKAGE_DESCRIPTION:
- UNIPackageDescriptionList.append((GetLanguageCode1766(Lang),
+ UNIPackageDescriptionList.append((GetLanguageCode1766(Lang),
ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
# Get Abstract and Description from DEC File Header
@@ -411,7 +411,7 @@ class PackageHeaderXml(object):
for (Lang, Value) in PackageObject2.GetDescription() + UNIPackageDescriptionList:
if Value:
NodeList.append(CreateXmlElement(DataType.TAB_HEADER_DESCRIPTION, Value, [], [['Lang', Lang]]))
-
+
NodeList.append(['PackagePath', PackageObject2.GetPackagePath()])
AttributeList = []
@@ -617,16 +617,16 @@ class UserExtensionsXml(object):
self.BinaryAbstractList.append((BinaryAbstractLang, XmlElement(SubItem, '%s/BinaryAbstract' % Key)))
for SubItem in XmlList(Item, '%s/BinaryDescription' % Key):
BinaryDescriptionLang = XmlAttribute(SubItem, 'Lang')
- self.BinaryDescriptionList.append((BinaryDescriptionLang,
+ self.BinaryDescriptionList.append((BinaryDescriptionLang,
XmlElement(SubItem, '%s/BinaryDescription' % Key)))
for SubItem in XmlList(Item, '%s/BinaryCopyright' % Key):
BinaryCopyrightLang = XmlAttribute(SubItem, 'Lang')
- self.BinaryCopyrightList.append((BinaryCopyrightLang,
+ self.BinaryCopyrightList.append((BinaryCopyrightLang,
XmlElement(SubItem, '%s/BinaryCopyright' % Key)))
for SubItem in XmlList(Item, '%s/BinaryLicense' % Key):
BinaryLicenseLang = XmlAttribute(SubItem, 'Lang')
- self.BinaryLicenseList.append((BinaryLicenseLang,
- XmlElement(SubItem, '%s/BinaryLicense' % Key)))
+ self.BinaryLicenseList.append((BinaryLicenseLang,
+ XmlElement(SubItem, '%s/BinaryLicense' % Key)))
DefineItem = XmlNode(Item, '%s/Define' % Key)
for SubItem in XmlList(DefineItem, 'Define/Statement'):
@@ -697,7 +697,7 @@ class UserExtensionsXml(object):
if Value:
ChildElement = CreateXmlElement('BinaryLicense', Value, [], [])
Root.appendChild(ChildElement)
-
+
NodeList = []
DefineDict = UserExtension.GetDefinesDict()
if DefineDict:
@@ -976,7 +976,7 @@ class FilenameXml(object):
#
if self.FileType == 'UEFI_IMAGE':
self.FileType = 'PE32'
-
+
Filename.SetGuidValue(Guid)
Filename.SetFileType(self.FileType)
Filename.SetFilename(self.Filename)
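The CommonXml.py hunks above (and most of the UPT Xml classes that follow) build their output through CreateXmlElement(Tag, Text, NodeList, AttributeList). As a rough, hedged stand-in for that calling convention, here is a minimal version built on xml.dom.minidom; it is not the UPT implementation, and it ignores the cases where NodeList already contains pre-built DOM nodes.

# Rough stand-in for the CreateXmlElement(Tag, Text, NodeList, AttributeList)
# calling convention used above; the real helper lives in UPT and also accepts
# pre-built DOM nodes in NodeList, which this sketch does not.
from xml.dom.minidom import Document

def create_xml_element(tag, text, node_list, attribute_list):
    doc = Document()
    element = doc.createElement(tag)
    if text:
        element.appendChild(doc.createTextNode(text))
    for child_tag, child_text in node_list:       # sketch: (tag, text) pairs only
        child = doc.createElement(child_tag)
        child.appendChild(doc.createTextNode(child_text))
        element.appendChild(child)
    for name, value in attribute_list:
        element.setAttribute(name, value)
    return element

root = create_xml_element('Prompt', 'Enable feature', [], [['Lang', 'en-US']])
print(root.toxml())    # <Prompt Lang="en-US">Enable feature</Prompt>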
diff --git a/BaseTools/Source/Python/UPT/Xml/GuidProtocolPpiXml.py b/BaseTools/Source/Python/UPT/Xml/GuidProtocolPpiXml.py
index a747b02542..d4db9209eb 100644
--- a/BaseTools/Source/Python/UPT/Xml/GuidProtocolPpiXml.py
+++ b/BaseTools/Source/Python/UPT/Xml/GuidProtocolPpiXml.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -46,17 +46,17 @@ class GuidProtocolPpiXml(object):
self.CommonDefines = CommonDefinesXml()
self.HelpText = []
#
- # Guid/Ppi/Library, internal used for indicate return object for
+ # Guid/Ppi/Library, internal used for indicate return object for
# FromXml
#
- self.Type = ''
+ self.Type = ''
#
# there are slightly different field between package and module
#
self.Mode = Mode
self.GuidType = ''
self.VariableName = ''
-
+
def FromXml(self, Item, Key):
self.UiName = XmlAttribute(XmlNode(Item, '%s' % Key), 'UiName')
self.GuidType = XmlAttribute(XmlNode(Item, '%s' % Key), 'GuidType')
@@ -69,8 +69,8 @@ class GuidProtocolPpiXml(object):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
-
- if self.Type == 'Guid':
+
+ if self.Type == 'Guid':
GuidProtocolPpi = GuidObject()
elif self.Type == 'Protocol':
GuidProtocolPpi = ProtocolObject()
@@ -92,7 +92,7 @@ class GuidProtocolPpiXml(object):
['SupModList', GetStringOfList(GuidProtocolPpi.GetSupModuleList())], \
['FeatureFlag', ConvertNEToNOTEQ(GuidProtocolPpi.GetFeatureFlag())]
]
- NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
['GuidValue', GuidProtocolPpi.GetGuid()],
['VariableName', GuidProtocolPpi.VariableName]
]
@@ -100,7 +100,7 @@ class GuidProtocolPpiXml(object):
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
-
+
return Root
def __str__(self):
@@ -118,12 +118,12 @@ class GuidXml(GuidProtocolPpiXml):
def __init__(self, Mode):
GuidProtocolPpiXml.__init__(self, Mode)
self.Type = 'Guid'
-
- def FromXml(self, Item, Key):
+
+ def FromXml(self, Item, Key):
GuidProtocolPpi = GuidProtocolPpiXml.FromXml(self, Item, Key)
if self.Mode == 'Package':
-
+
GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
GuidProtocolPpi.SetSupModuleList(self.CommonDefines.SupModList)
GuidProtocolPpi.SetCName(self.CName)
@@ -139,16 +139,16 @@ class GuidXml(GuidProtocolPpiXml):
return GuidProtocolPpi
def ToXml(self, GuidProtocolPpi, Key):
- if self.Mode == 'Package':
+ if self.Mode == 'Package':
AttributeList = \
[['GuidType', \
GetStringOfList(GuidProtocolPpi.GetGuidTypeList())], \
['SupArchList', \
GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
['SupModList', \
- GetStringOfList(GuidProtocolPpi.GetSupModuleList())],
+ GetStringOfList(GuidProtocolPpi.GetSupModuleList())],
]
- NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
['GuidValue', GuidProtocolPpi.GetGuid()],
]
else:
@@ -159,7 +159,7 @@ class GuidXml(GuidProtocolPpiXml):
GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
['FeatureFlag', ConvertNEToNOTEQ(GuidProtocolPpi.GetFeatureFlag())]
]
- NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
['VariableName', GuidProtocolPpi.GetVariableName()]
]
@@ -167,7 +167,7 @@ class GuidXml(GuidProtocolPpiXml):
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
-
+
return Root
##
#Protocol Xml
@@ -176,11 +176,11 @@ class ProtocolXml(GuidProtocolPpiXml):
def __init__(self, Mode):
GuidProtocolPpiXml.__init__(self, Mode)
self.Type = 'Protocol'
-
+
def FromXml(self, Item, Key):
GuidProtocolPpi = GuidProtocolPpiXml.FromXml(self, Item, Key)
if self.Mode == 'Package':
- GuidProtocolPpi.SetFeatureFlag(self.CommonDefines.FeatureFlag)
+ GuidProtocolPpi.SetFeatureFlag(self.CommonDefines.FeatureFlag)
GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
GuidProtocolPpi.SetSupModuleList(self.CommonDefines.SupModList)
GuidProtocolPpi.SetCName(self.CName)
@@ -196,11 +196,11 @@ class ProtocolXml(GuidProtocolPpiXml):
GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
GuidProtocolPpi.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
GuidProtocolPpi.SetCName(self.CName)
-
+
return GuidProtocolPpi
def ToXml(self, GuidProtocolPpi, Key):
- if self.Mode == 'Package':
+ if self.Mode == 'Package':
AttributeList = \
[['SupArchList', \
GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
@@ -208,7 +208,7 @@ class ProtocolXml(GuidProtocolPpiXml):
GetStringOfList(GuidProtocolPpi.GetSupModuleList())], \
['FeatureFlag', GuidProtocolPpi.GetFeatureFlag()]
]
- NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
['GuidValue', GuidProtocolPpi.GetGuid()],
]
else:
@@ -219,14 +219,14 @@ class ProtocolXml(GuidProtocolPpiXml):
GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
['FeatureFlag', ConvertNEToNOTEQ(GuidProtocolPpi.GetFeatureFlag())]
]
- NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
]
-
+
for Item in GuidProtocolPpi.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
-
+
return Root
##
#Ppi Xml
@@ -235,7 +235,7 @@ class PpiXml(GuidProtocolPpiXml):
def __init__(self, Mode):
GuidProtocolPpiXml.__init__(self, Mode)
self.Type = 'Ppi'
-
+
def FromXml(self, Item, Key):
GuidProtocolPpi = GuidProtocolPpiXml.FromXml(self, Item, Key)
if self.Mode == 'Package':
@@ -254,16 +254,16 @@ class PpiXml(GuidProtocolPpiXml):
GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
GuidProtocolPpi.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
GuidProtocolPpi.SetCName(self.CName)
-
+
return GuidProtocolPpi
def ToXml(self, GuidProtocolPpi, Key):
if self.Mode == 'Package':
AttributeList = \
[['SupArchList', \
- GetStringOfList(GuidProtocolPpi.GetSupArchList())],
+ GetStringOfList(GuidProtocolPpi.GetSupArchList())],
]
- NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
['GuidValue', GuidProtocolPpi.GetGuid()],
]
else:
@@ -274,9 +274,9 @@ class PpiXml(GuidProtocolPpiXml):
GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
['FeatureFlag', ConvertNEToNOTEQ(GuidProtocolPpi.GetFeatureFlag())]
]
- NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
]
-
+
for Item in GuidProtocolPpi.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
diff --git a/BaseTools/Source/Python/UPT/Xml/IniToXml.py b/BaseTools/Source/Python/UPT/Xml/IniToXml.py
index aa6f23011b..70d8fb19f2 100644
--- a/BaseTools/Source/Python/UPT/Xml/IniToXml.py
+++ b/BaseTools/Source/Python/UPT/Xml/IniToXml.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -96,20 +96,20 @@ def ParseFileList(Line, Map, CurrentKey, PathFunc):
Attr = Token.split(TAB_EQUAL_SPLIT)
if len(Attr) != 2 or not Attr[0].strip() or not Attr[1].strip():
return False, ST.ERR_WRONG_FILELIST_FORMAT
-
+
Key = Attr[0].strip()
Val = Attr[1].strip()
if Key not in ['OS', 'Executable']:
return False, ST.ERR_UNKNOWN_FILELIST_ATTR % Key
-
- if Key == 'OS' and Val not in ["Win32", "Win64", "Linux32",
- "Linux64", "OS/X32", "OS/X64",
+
+ if Key == 'OS' and Val not in ["Win32", "Win64", "Linux32",
+ "Linux64", "OS/X32", "OS/X64",
"GenericWin", "GenericNix"]:
return False, ST.ERR_FILELIST_ATTR % 'OS'
elif Key == 'Executable' and Val not in ['true', 'false']:
return False, ST.ERR_FILELIST_ATTR % 'Executable'
FileList[1][Key] = Val
-
+
Map[CurrentKey].append(FileList)
return True, ''
@@ -143,7 +143,7 @@ def CreateHeaderXml(DistMap, Root):
#
# @param Map: Map
# @param Root: Root
-# @param Tag: Tag
+# @param Tag: Tag
#
def CreateToolsXml(Map, Root, Tag):
#
@@ -225,7 +225,7 @@ def ValidateRegValues(Key, Value):
def __ValidateDistHeaderName(Name):
if len(Name) < 1:
return False
-
+
for Char in Name:
if ord(Char) < 0x20 or ord(Char) >= 0x7f:
return False
@@ -314,12 +314,12 @@ def IniToXml(IniFile):
'ToolsHeader' : ToolsMap,
'MiscellaneousFilesHeader' : MiscMap
}
-
+
PathValidator = {
'ToolsHeader' : ValidateToolsFile,
'MiscellaneousFilesHeader' : ValidateMiscFile
}
-
+
ParsedSection = []
SectionName = ''
@@ -339,13 +339,13 @@ def IniToXml(IniFile):
if SectionName not in SectionMap:
IniParseError(ST.ERR_SECTION_NAME_INVALID % SectionName,
IniFile, Index+1)
-
+
if SectionName in ParsedSection:
IniParseError(ST.ERR_SECTION_REDEFINE % SectionName,
IniFile, Index+1)
else:
ParsedSection.append(SectionName)
-
+
Map = SectionMap[SectionName]
continue
if not Map:
@@ -363,7 +363,7 @@ def IniToXml(IniFile):
#
# Special for FileList
#
- Valid, Cause = ParseFileList(Line, Map, CurrentKey,
+ Valid, Cause = ParseFileList(Line, Map, CurrentKey,
PathValidator[SectionName])
if not Valid:
IniParseError(Cause, IniFile, Index+1)
@@ -374,7 +374,7 @@ def IniToXml(IniFile):
# Or if string on the left side of '=' is not a keyword
#
Map[CurrentKey] = ''.join([Map[CurrentKey], '\n', Line])
- Valid, Cause = ValidateValues(CurrentKey,
+ Valid, Cause = ValidateValues(CurrentKey,
Map[CurrentKey], SectionName)
if not Valid:
IniParseError(Cause, IniFile, Index+1)
@@ -390,7 +390,7 @@ def IniToXml(IniFile):
if Map[CurrentKey]:
IniParseError(ST.ERR_KEYWORD_REDEFINE % CurrentKey,
IniFile, Index+1)
-
+
if id(Map) != id(PreMap) and Map['Copyright']:
PreMap = Map
Copyright = Map['Copyright'].lower()
@@ -399,9 +399,9 @@ def IniToXml(IniFile):
IniParseError(ST.ERR_COPYRIGHT_CONTENT, IniFile, Index)
if not Copyright[Pos + len('copyright'):].lstrip(' ').startswith('('):
IniParseError(ST.ERR_COPYRIGHT_CONTENT, IniFile, Index)
-
+
if CurrentKey == 'FileList':
- Valid, Cause = ParseFileList(TokenList[1], Map, CurrentKey,
+ Valid, Cause = ParseFileList(TokenList[1], Map, CurrentKey,
PathValidator[SectionName])
if not Valid:
IniParseError(Cause, IniFile, Index+1)
@@ -411,17 +411,17 @@ def IniToXml(IniFile):
Map[CurrentKey], SectionName)
if not Valid:
IniParseError(Cause, IniFile, Index+1)
-
+
if id(Map) != id(PreMap) and Map['Copyright'] and 'copyright' not in Map['Copyright'].lower():
IniParseError(ST.ERR_COPYRIGHT_CONTENT, IniFile, LastIndex)
#
# Check mandatory keys
- #
- CheckMdtKeys(DistMap, IniFile, LastIndex,
+ #
+ CheckMdtKeys(DistMap, IniFile, LastIndex,
(('ToolsHeader', ToolsMap), ('MiscellaneousFilesHeader', MiscMap))
)
-
+
return CreateXml(DistMap, ToolsMap, MiscMap, IniFile)
@@ -433,15 +433,15 @@ def IniToXml(IniFile):
# @param LastIndex: Last index of Ini file
# @param Maps: Tools and Misc section name and map. (('section_name', map),*)
#
-def CheckMdtKeys(DistMap, IniFile, LastIndex, Maps):
+def CheckMdtKeys(DistMap, IniFile, LastIndex, Maps):
MdtDistKeys = ['Name', 'GUID', 'Version', 'Vendor', 'Copyright', 'License', 'Abstract', 'XmlSpecification']
for Key in MdtDistKeys:
if Key not in DistMap or DistMap[Key] == '':
IniParseError(ST.ERR_KEYWORD_MANDATORY % Key, IniFile, LastIndex+1)
-
+
if '.' not in DistMap['Version']:
DistMap['Version'] = DistMap['Version'] + '.0'
-
+
DistMap['Date'] = str(strftime("%Y-%m-%dT%H:%M:%S", localtime()))
#
@@ -464,10 +464,10 @@ def CheckMdtKeys(DistMap, IniFile, LastIndex, Maps):
for Key in Map:
if Map[Key]:
NonEmptyKey += 1
-
+
if NonEmptyKey > 0 and not Map['FileList']:
IniParseError(ST.ERR_KEYWORD_MANDATORY % (Item[0] + '.FileList'), IniFile, LastIndex+1)
-
+
if NonEmptyKey > 0 and not Map['Name']:
IniParseError(ST.ERR_KEYWORD_MANDATORY % (Item[0] + '.Name'), IniFile, LastIndex+1)
@@ -478,7 +478,7 @@ def CheckMdtKeys(DistMap, IniFile, LastIndex, Maps):
# @param MiscMap: Misc Content
# @param IniFile: Ini File
#
-def CreateXml(DistMap, ToolsMap, MiscMap, IniFile):
+def CreateXml(DistMap, ToolsMap, MiscMap, IniFile):
Attrs = [['xmlns', 'http://www.uefi.org/2011/1.1'],
['xmlns:xsi', 'http:/www.w3.org/2001/XMLSchema-instance'],
]
@@ -493,7 +493,7 @@ def CreateXml(DistMap, ToolsMap, MiscMap, IniFile):
else:
FileName = IniFile + '.xml'
File = open(FileName, 'w')
-
+
try:
File.write(Root.toprettyxml(indent = ' '))
finally:
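One detail worth calling out from the ParseFileList hunk above: each FileList entry may carry OS and Executable attributes with a fixed set of accepted values. The sketch below restates that check on its own; the function name validate_filelist_attr is hypothetical, and the value lists are copied from the code shown above.

# Standalone restatement of the FileList attribute check shown in the
# ParseFileList hunk above; validate_filelist_attr is a hypothetical name.
VALID_OS_VALUES = ["Win32", "Win64", "Linux32", "Linux64",
                   "OS/X32", "OS/X64", "GenericWin", "GenericNix"]

def validate_filelist_attr(key, value):
    if key not in ('OS', 'Executable'):
        return False, 'unknown FileList attribute: %s' % key
    if key == 'OS' and value not in VALID_OS_VALUES:
        return False, 'bad OS value: %s' % value
    if key == 'Executable' and value not in ('true', 'false'):
        return False, 'bad Executable value: %s' % value
    return True, ''

assert validate_filelist_attr('OS', 'Win64') == (True, '')
assert not validate_filelist_attr('Executable', 'TRUE')[0]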
diff --git a/BaseTools/Source/Python/UPT/Xml/PackageSurfaceAreaXml.py b/BaseTools/Source/Python/UPT/Xml/PackageSurfaceAreaXml.py
index 30091c6231..00666bfb57 100644
--- a/BaseTools/Source/Python/UPT/Xml/PackageSurfaceAreaXml.py
+++ b/BaseTools/Source/Python/UPT/Xml/PackageSurfaceAreaXml.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -173,7 +173,7 @@ class PackageSurfaceAreaXml(object):
if XmlNode(Item, '/PackageSurfaceArea/ClonedFrom'):
ClonedFrom = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/ClonedFrom'), 'ClonedFrom')
Package.SetClonedFromList([ClonedFrom])
- #
+ #
# LibraryClass
#
@@ -254,7 +254,7 @@ class PackageSurfaceAreaXml(object):
Tmp = PcdEntryXml()
PcdEntry = Tmp.FromXml2(SubItem, 'PcdEntry')
Package.SetPcdList(Package.GetPcdList() + [PcdEntry])
-
+
#
# Get PcdErrorCommentDict from PcdError in PcdEntry Node
#
@@ -263,7 +263,7 @@ class PackageSurfaceAreaXml(object):
if PcdErrorMessageList:
Package.PcdErrorCommentDict[(PcdEntry.GetTokenSpaceGuidCName(), PcdErrorObj.GetErrorNumber())] = \
PcdErrorMessageList
-
+
if XmlList(Item, '/PackageSurfaceArea/PcdDeclarations') and not \
XmlList(Item, '/PackageSurfaceArea/PcdDeclarations/PcdEntry'):
@@ -285,7 +285,7 @@ class PackageSurfaceAreaXml(object):
Module = Tmp.FromXml(SubItem, 'ModuleSurfaceArea')
ModuleDictKey = (Module.GetGuid(), Module.GetVersion(), Module.GetName(), Module.GetModulePath())
Package.ModuleDict[ModuleDictKey] = Module
- #
+ #
# MiscellaneousFile
#
Tmp = MiscellaneousFileXml()
@@ -295,7 +295,7 @@ class PackageSurfaceAreaXml(object):
else:
Package.SetMiscFileList([])
- #
+ #
# UserExtensions
#
for Item in XmlList(Item, '/PackageSurfaceArea/UserExtensions'):
@@ -372,7 +372,7 @@ class PackageSurfaceAreaXml(object):
GuidProtocolPpiNode.appendChild\
(Tmp.ToXml(GuidProtocolPpi, 'Entry'))
DomPackage.appendChild(GuidProtocolPpiNode)
- #
+ #
# Ppi
#
GuidProtocolPpiNode = CreateXmlElement('PpiDeclarations', '', [], [])
diff --git a/BaseTools/Source/Python/UPT/Xml/PcdXml.py b/BaseTools/Source/Python/UPT/Xml/PcdXml.py
index c0dc654a37..7a0eb7f3fe 100644
--- a/BaseTools/Source/Python/UPT/Xml/PcdXml.py
+++ b/BaseTools/Source/Python/UPT/Xml/PcdXml.py
@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -49,7 +49,7 @@ class PcdErrorXml(object):
self.Expression = ''
self.ErrorNumber = ''
self.ErrorMessage = []
-
+
def FromXml(self, Item, Key):
self.ValidValueList = XmlElement(Item, '%s/ValidValueList' % Key)
self.ValidValueListLang = \
@@ -62,7 +62,7 @@ class PcdErrorXml(object):
ErrorMessageLang = \
XmlAttribute(XmlNode(ErrMsg, 'ErrorMessage'), 'Lang')
self.ErrorMessage.append((ErrorMessageLang, ErrorMessageString))
-
+
Error = PcdErrorObject()
Error.SetValidValue(self.ValidValueList)
Error.SetValidValueLang(self.ValidValueListLang)
@@ -70,7 +70,7 @@ class PcdErrorXml(object):
Error.SetExpression(self.Expression)
Error.SetErrorNumber(self.ErrorNumber)
Error.SetErrorMessageList(self.ErrorMessage)
-
+
return Error
def ToXml(self, PcdError, Key):
@@ -100,9 +100,9 @@ class PcdErrorXml(object):
CreateXmlElement('ErrorMessage', Item[1], [], [['Lang', Item[0]]])
NodeList.append(Element)
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
-
+
return Root
-
+
def TransferValidRange2Expr(self, TokenSpaceGuidCName, CName, ValidRange):
if self.Expression:
pass
@@ -125,7 +125,7 @@ class PcdErrorXml(object):
for MatchObj in HexMatch2.finditer(ValidRange):
MatchStr = MatchObj.group()
TransferedRangeStr = ' '.join(['', PcdName, MatchStr.strip()])
- ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
+ ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
#
# Convert INT2 format range
#
@@ -133,36 +133,36 @@ class PcdErrorXml(object):
for MatchObj in IntMatch2.finditer(ValidRange):
MatchStr = MatchObj.group()
TransferedRangeStr = ' '.join(['', PcdName, MatchStr.strip()])
- ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
+ ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
#
# Convert HEX1 format range
#
if HexMatch1:
HexMatchedList += HexMatch1.findall(ValidRange)
-
+
for MatchStr in HexMatchedList:
RangeItemList = MatchStr.strip().split('-')
TransferedRangeStr = '(%s GE %s) AND (%s LE %s)' % \
(PcdName, RangeItemList[0].strip(), PcdName, RangeItemList[1].strip())
- ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
+ ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
#
# Convert INT1 format range
#
if IntMatch1:
IntMatchedList += IntMatch1.findall(ValidRange)
-
+
for MatchStr in IntMatchedList:
RangeItemList = MatchStr.strip().split('-')
TransferedRangeStr = '(%s GE %s) AND (%s LE %s)' % \
(PcdName, RangeItemList[0].strip(), PcdName, RangeItemList[1].strip())
- ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
-
+ ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
+
return ValidRange
-
+
def TransferValidEpxr2ValidRange(self, ValidRangeExpr):
if self.Expression:
pass
-
+
PCD_PATTERN = \
'[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*\.[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*'
IntPattern1 = \
@@ -170,16 +170,16 @@ class PcdErrorXml(object):
PCD_PATTERN+'[\t\s]+LE[\t\s]+\d+[\t\s]*\)'
IntPattern1 = IntPattern1.replace(' ', '')
IntPattern2 = '[\t\s]*'+PCD_PATTERN+'[\t\s]+(LT|GT|LE|GE|XOR|EQ)[\t\s]+\d+[\t\s]*'
-
+
HexPattern1 = \
'[\t\s]*\([\t\s]*'+PCD_PATTERN+'[\t\s]+GE[\t\s]+0[xX][0-9a-fA-F]+[\t\s]*\)[\t\s]+AND[\t\s]+\([\t\s]*'+\
PCD_PATTERN+'[\t\s]+LE[\t\s]+0[xX][0-9a-fA-F]+[\t\s]*\)'
HexPattern1 = HexPattern1.replace(' ', '')
HexPattern2 = '[\t\s]*'+PCD_PATTERN+'[\t\s]+(LT|GT|LE|GE|XOR|EQ)[\t\s]+0[xX][0-9a-zA-Z]+[\t\s]*'
-
+
#
# Do the Hex1 conversion
- #
+ #
HexMatchedList = re.compile(HexPattern1).findall(ValidRangeExpr)
HexRangeDict = {}
for HexMatchedItem in HexMatchedList:
@@ -188,8 +188,8 @@ class PcdErrorXml(object):
#
RangeItemList = re.compile('[\t\s]*0[xX][0-9a-fA-F]+[\t\s]*').findall(HexMatchedItem)
if RangeItemList and len(RangeItemList) == 2:
- HexRangeDict[HexMatchedItem] = RangeItemList
-
+ HexRangeDict[HexMatchedItem] = RangeItemList
+
for Key in HexRangeDict.keys():
MaxItem = MixItem = ''
if int(HexRangeDict[Key][0], 16) > int(HexRangeDict[Key][1], 16):
@@ -198,7 +198,7 @@ class PcdErrorXml(object):
else:
MaxItem = HexRangeDict[Key][1]
MixItem = HexRangeDict[Key][0]
-
+
Range = ' %s - %s' % (MixItem.strip(), MaxItem.strip())
ValidRangeExpr = ValidRangeExpr.replace(Key, Range)
#
@@ -211,9 +211,9 @@ class PcdErrorXml(object):
# To match items on both sides of '-'
#
RangeItemList = re.compile('[\t\s]*\d+[\t\s]*').findall(MatchedItem)
- if RangeItemList and len(RangeItemList) == 2:
+ if RangeItemList and len(RangeItemList) == 2:
IntRangeDict[MatchedItem] = RangeItemList
-
+
for Key in IntRangeDict.keys():
MaxItem = MixItem = ''
if int(IntRangeDict[Key][0]) > int(IntRangeDict[Key][1]):
@@ -222,11 +222,11 @@ class PcdErrorXml(object):
else:
MaxItem = IntRangeDict[Key][1]
MixItem = IntRangeDict[Key][0]
-
+
Range = ' %s - %s' % (MixItem.strip(), MaxItem.strip())
ValidRangeExpr = ValidRangeExpr.replace(Key, Range)
#
- # Do the HEX2 conversion
+ # Do the HEX2 conversion
#
for MatchObj in re.compile(HexPattern2).finditer(ValidRangeExpr):
MatchStr = MatchObj.group()
@@ -241,8 +241,8 @@ class PcdErrorXml(object):
ValidRangeExpr = ValidRangeExpr.replace(MatchStr, Range)
return ValidRangeExpr
-
-
+
+
def __str__(self):
return "ValidValueList = %s ValidValueListLang = %s ValidValueRange \
@@ -275,7 +275,7 @@ class PcdEntryXml(object):
##
# AsBuilt will use FromXml
- #
+ #
def FromXml(self, Item, Key):
self.PcdItemType = \
XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdItemType')
@@ -297,7 +297,7 @@ class PcdEntryXml(object):
self.Value = XmlElement(Item, '%s/Value' % Key)
self.Offset = XmlElement(Item, '%s/Offset' % Key)
self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
-
+
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
@@ -306,9 +306,9 @@ class PcdEntryXml(object):
PcdErrorObjXml = PcdErrorXml()
PcdErrorObj = PcdErrorObjXml.FromXml(PcdErrorItem, 'PcdError')
self.PcdError.append(PcdErrorObj)
-
+
self.DefaultValue = ConvertNOTEQToNE(self.DefaultValue)
-
+
PcdEntry = PcdObject()
PcdEntry.SetSupArchList(self.CommonDefines.SupArchList)
PcdEntry.SetTokenSpaceGuidCName(self.TokenSpaceGuidCName)
@@ -326,11 +326,11 @@ class PcdEntryXml(object):
PcdEntry.SetHelpTextList(GetHelpTextList(self.HelpText))
PcdEntry.SetPcdErrorsList(self.PcdError)
-
+
return PcdEntry
##
# Package will use FromXml2
- #
+ #
def FromXml2(self, Item, Key):
self.TokenSpaceGuidCName = \
XmlElement(Item, '%s/TokenSpaceGuidCname' % Key)
@@ -353,9 +353,9 @@ class PcdEntryXml(object):
PcdErrorObjXml = PcdErrorXml()
PcdErrorObj = PcdErrorObjXml.FromXml(PcdErrorItem, 'PcdError')
self.PcdError.append(PcdErrorObj)
-
+
self.DefaultValue = ConvertNOTEQToNE(self.DefaultValue)
-
+
PcdEntry = PcdObject()
PcdEntry.SetSupArchList(self.CommonDefines.SupArchList)
PcdEntry.SetSupModuleList(self.CommonDefines.SupModList)
@@ -367,11 +367,11 @@ class PcdEntryXml(object):
PcdEntry.SetDefaultValue(self.DefaultValue)
PcdEntry.SetMaxDatumSize(self.MaxDatumSize)
PcdEntry.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
-
+
PcdEntry.SetPromptList(GetPromptList(self.Prompt))
PcdEntry.SetHelpTextList(GetHelpTextList(self.HelpText))
PcdEntry.SetPcdErrorsList(self.PcdError)
-
+
return PcdEntry
##
@@ -394,9 +394,9 @@ class PcdEntryXml(object):
PcdErrorObj = PcdErrorXml()
PcdErrorObj.FromXml(PcdErrorItem, 'PcdError')
self.PcdError.append(PcdErrorObj)
-
+
self.DefaultValue = ConvertNOTEQToNE(self.DefaultValue)
-
+
PcdEntry = PcdObject()
PcdEntry.SetSupArchList(self.CommonDefines.SupArchList)
PcdEntry.SetTokenSpaceGuidCName(self.TokenSpaceGuidCName)
@@ -408,15 +408,15 @@ class PcdEntryXml(object):
PcdEntry.SetHelpTextList(GetHelpTextList(self.HelpText))
PcdEntry.SetPcdErrorsList(self.PcdError)
-
+
return PcdEntry
-
+
def ToXml(self, PcdEntry, Key):
if self.PcdCName:
pass
-
+
DefaultValue = ConvertNEToNOTEQ(PcdEntry.GetDefaultValue())
-
+
AttributeList = \
[['SupArchList', GetStringOfList(PcdEntry.GetSupArchList())], \
['PcdUsage', PcdEntry.GetValidUsage()], \
@@ -425,7 +425,7 @@ class PcdEntryXml(object):
]
NodeList = [['TokenSpaceGuidCname', PcdEntry.GetTokenSpaceGuidCName()],
['TokenSpaceGuidValue', PcdEntry.GetTokenSpaceGuidValue()],
- ['Token', PcdEntry.GetToken()],
+ ['Token', PcdEntry.GetToken()],
['CName', PcdEntry.GetCName()],
['DatumType', PcdEntry.GetDatumType()],
['ValidUsage', GetStringOfList(PcdEntry.GetValidUsage())],
@@ -433,26 +433,26 @@ class PcdEntryXml(object):
['MaxDatumSize', PcdEntry.GetMaxDatumSize()],
['Offset', PcdEntry.GetOffset()],
]
-
+
for Item in PcdEntry.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
for Item in PcdEntry.GetPcdErrorsList():
Tmp = PcdErrorXml()
NodeList.append(Tmp.ToXml(Item, 'PcdError'))
-
+
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
-
+
return Root
##
# Package will use ToXml2
- #
+ #
def ToXml2(self, PcdEntry, Key):
if self.PcdCName:
pass
-
+
DefaultValue = ConvertNEToNOTEQ(PcdEntry.GetDefaultValue())
-
+
AttributeList = \
[['SupArchList', GetStringOfList(PcdEntry.GetSupArchList())], \
['SupModList', GetStringOfList(PcdEntry.GetSupModuleList())]
@@ -468,7 +468,7 @@ class PcdEntryXml(object):
for Item in PcdEntry.GetPromptList():
Tmp = PromptXml()
NodeList.append(Tmp.ToXml(Item))
-
+
for Item in PcdEntry.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
@@ -476,9 +476,9 @@ class PcdEntryXml(object):
for Item in PcdEntry.GetPcdErrorsList():
Tmp = PcdErrorXml()
NodeList.append(Tmp.ToXml(Item, 'PcdError'))
-
+
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
-
+
return Root
##
# Module will use ToXml3
@@ -486,9 +486,9 @@ class PcdEntryXml(object):
def ToXml3(self, PcdEntry, Key):
if self.PcdCName:
pass
-
+
DefaultValue = ConvertNEToNOTEQ(PcdEntry.GetDefaultValue())
-
+
AttributeList = \
[['SupArchList', GetStringOfList(PcdEntry.GetSupArchList())], \
['PcdUsage', PcdEntry.GetValidUsage()], \
@@ -499,29 +499,29 @@ class PcdEntryXml(object):
['TokenSpaceGuidCName', PcdEntry.GetTokenSpaceGuidCName()],
['DefaultValue', DefaultValue],
]
-
+
for Item in PcdEntry.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
for Item in PcdEntry.GetPcdErrorsList():
Tmp = PcdErrorXml()
NodeList.append(Tmp.ToXml(Item, 'PcdError'))
-
+
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
-
+
return Root
-
+
##
# AsBuild Module will use ToXml4
#
def ToXml4(self, PcdEntry, Key):
if self.PcdCName:
pass
-
+
DefaultValue = ConvertNEToNOTEQ(PcdEntry.GetDefaultValue())
-
+
AttributeList = []
-
+
NodeList = [
['TokenSpaceGuidValue', PcdEntry.GetTokenSpaceGuidValue()],
['PcdCName', PcdEntry.GetCName()],
@@ -531,18 +531,18 @@ class PcdEntryXml(object):
['Value', DefaultValue],
['Offset', PcdEntry.GetOffset()]
]
-
+
for Item in PcdEntry.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
for Item in PcdEntry.GetPcdErrorsList():
Tmp = PcdErrorXml()
NodeList.append(Tmp.ToXml(Item, 'PcdError'))
-
+
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
-
+
return Root
-
+
def __str__(self):
Str = \
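To make the PcdErrorXml range handling above easier to follow: TransferValidRange2Expr rewrites a plain range such as "0x1 - 0xF" into its expression form. Below is a minimal sketch of that single rewrite, assuming one well-formed hex range; the real method also covers decimal ranges, relational forms, and the reverse conversion in TransferValidEpxr2ValidRange.

# Minimal sketch of the range-to-expression rewrite performed by
# TransferValidRange2Expr above, assuming one well-formed hex range.
import re

def hex_range_to_expr(pcd_name, valid_range):
    match = re.match(r'\s*(0[xX][0-9a-fA-F]+)\s*-\s*(0[xX][0-9a-fA-F]+)\s*$', valid_range)
    if not match:
        return valid_range                # leave anything else untouched
    low, high = match.group(1), match.group(2)
    return '(%s GE %s) AND (%s LE %s)' % (pcd_name, low, pcd_name, high)

print(hex_range_to_expr('gTokenSpaceGuid.PcdSample', '0x1 - 0xF'))
# (gTokenSpaceGuid.PcdSample GE 0x1) AND (gTokenSpaceGuid.PcdSample LE 0xF)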
diff --git a/BaseTools/Source/Python/UPT/Xml/XmlParser.py b/BaseTools/Source/Python/UPT/Xml/XmlParser.py
index dfc81567ae..82f307b3e5 100644
--- a/BaseTools/Source/Python/UPT/Xml/XmlParser.py
+++ b/BaseTools/Source/Python/UPT/Xml/XmlParser.py
@@ -1,11 +1,11 @@
## @file
# This file is used to parse a xml file of .PKG file
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -89,7 +89,7 @@ class DistributionPackageXml(object):
DPLicense = DpHeader.GetLicense()[0][1]
else:
DPLicense = ''
-
+
CheckDict['Name'] = DpHeader.GetName()
CheckDict['GUID'] = DpHeader.GetGuid()
CheckDict['Version'] = DpHeader.GetVersion()
@@ -119,7 +119,7 @@ class DistributionPackageXml(object):
ValidateMS(self.DistP.ModuleSurfaceArea[Key], ['DistributionPackage', 'ModuleSurfaceArea'])
#
- # Check Each Tool
+ # Check Each Tool
#
if self.DistP.Tools:
XmlTreeLevel = ['DistributionPackage', 'Tools', 'Header']
@@ -195,7 +195,7 @@ class DistributionPackageXml(object):
ModuleKey = (Module.GetGuid(), Module.GetVersion(), Module.GetName(), Module.GetModulePath())
self.DistP.ModuleSurfaceArea[ModuleKey] = Module
- #
+ #
# Parse Tools
#
Tmp = MiscellaneousFileXml()
@@ -252,7 +252,7 @@ class DistributionPackageXml(object):
Msa = ModuleSurfaceAreaXml()
DomModule = Msa.ToXml(Module)
Root.appendChild(DomModule)
- #
+ #
# Parse Tools
#
Tmp = MiscellaneousFileXml()
@@ -307,7 +307,7 @@ class DistributionPackageXml(object):
#
# Remove SupModList="COMMON" or "common"
- #
+ #
XmlContent = \
re.sub(r'[\s\r\n]*SupModList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*COMMON'
'[\s\r\n]*"', '', XmlContent)
@@ -324,7 +324,7 @@ class DistributionPackageXml(object):
# Check if any required item is missing in ModuleSurfaceArea
#
# @param Module: The ModuleSurfaceArea to be checked
-# @param XmlTreeLevel: The top level of Module
+# @param XmlTreeLevel: The top level of Module
#
def ValidateMS(Module, TopXmlTreeLevel):
ValidateMS1(Module, TopXmlTreeLevel)
@@ -336,7 +336,7 @@ def ValidateMS(Module, TopXmlTreeLevel):
# Check if any required item is missing in ModuleSurfaceArea
#
# @param Module: The ModuleSurfaceArea to be checked
-# @param XmlTreeLevel: The top level of Module
+# @param XmlTreeLevel: The top level of Module
#
def ValidateMS1(Module, TopXmlTreeLevel):
#
@@ -421,10 +421,10 @@ def ValidateMS1(Module, TopXmlTreeLevel):
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
- # If SupArchList is used to identify different EntryPoint, UnloadImage, Constructor/Destructor elements and
+ # If SupArchList is used to identify different EntryPoint, UnloadImage, Constructor/Destructor elements and
# that SupArchList does not match ModuleSurfaceArea.ModuleProperties:SupArchList, the tool must exit gracefully,
- # informing the user that the EDK II Build system does not support different EntryPoint, UnloadImage,
- # Constructor or Destructor elements based on Architecture type. Two SupArchList attributes are considered
+ # informing the user that the EDK II Build system does not support different EntryPoint, UnloadImage,
+ # Constructor or Destructor elements based on Architecture type. Two SupArchList attributes are considered
# identical if it lists the same CPU architectures in any order.
#
for Item in Module.GetExternList():
@@ -459,7 +459,7 @@ def ValidateMS1(Module, TopXmlTreeLevel):
# Check if any required item is missing in ModuleSurfaceArea
#
# @param Module: The ModuleSurfaceArea to be checked
-# @param XmlTreeLevel: The top level of Module
+# @param XmlTreeLevel: The top level of Module
#
def ValidateMS2(Module, TopXmlTreeLevel):
#
@@ -485,7 +485,7 @@ def ValidateMS2(Module, TopXmlTreeLevel):
Logger.Error("UPT", FORMAT_INVALID, ERR_FILE_NAME_INVALIDE % Module.GetModulePath())
#
- # Check ModuleProperties->BootMode
+ # Check ModuleProperties->BootMode
#
XmlTreeLevel = TopXmlTreeLevel + ['ModuleProperties'] + ['BootMode']
for Item in Module.GetBootModeList():
@@ -494,7 +494,7 @@ def ValidateMS2(Module, TopXmlTreeLevel):
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
- # Check ModuleProperties->Event
+ # Check ModuleProperties->Event
#
XmlTreeLevel = TopXmlTreeLevel + ['ModuleProperties'] + ['Event']
for Item in Module.GetEventList():
@@ -503,7 +503,7 @@ def ValidateMS2(Module, TopXmlTreeLevel):
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
- # Check ModuleProperties->Hob
+ # Check ModuleProperties->Hob
#
XmlTreeLevel = TopXmlTreeLevel + ['ModuleProperties'] + ['HOB']
for Item in Module.GetHobList():
@@ -512,8 +512,8 @@ def ValidateMS2(Module, TopXmlTreeLevel):
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
- # The UDP Specification supports the module type of UEFI_RUNTIME_DRIVER, which is not present in the EDK II INF
- # File Specification v. 1.23, so UPT must perform the following translation that include the generation of a
+ # The UDP Specification supports the module type of UEFI_RUNTIME_DRIVER, which is not present in the EDK II INF
+ # File Specification v. 1.23, so UPT must perform the following translation that include the generation of a
# [Depex] section.
#
if Module.ModuleType == "UEFI_RUNTIME_DRIVER":
@@ -549,7 +549,7 @@ def ValidateMS2(Module, TopXmlTreeLevel):
'Usage':Item.GetUsage()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
- # If the LibraryClass:SupModList is not "UNDEFINED" the LIBRARY_CLASS entry must have the list
+ # If the LibraryClass:SupModList is not "UNDEFINED" the LIBRARY_CLASS entry must have the list
# appended using the format:
# LIBRARY_CLASS = <ClassName> ["|" <Edk2ModuleTypeList>]
#
@@ -574,10 +574,10 @@ def ValidateMS2(Module, TopXmlTreeLevel):
#
- # For Library modules (indicated by a LIBRARY_CLASS statement in the [Defines] section)
- # If the SupModList attribute of the CONSTRUCTOR or DESTRUCTOR element does not match the Supported Module
- # Types listed after "LIBRARY_CLASS = <Keyword> |", the tool should gracefully exit with an error message
- # stating that there is a conflict in the module types the CONSTRUCTOR/DESTRUCTOR is to be used with and
+ # For Library modules (indicated by a LIBRARY_CLASS statement in the [Defines] section)
+ # If the SupModList attribute of the CONSTRUCTOR or DESTRUCTOR element does not match the Supported Module
+ # Types listed after "LIBRARY_CLASS = <Keyword> |", the tool should gracefully exit with an error message
+ # stating that there is a conflict in the module types the CONSTRUCTOR/DESTRUCTOR is to be used with and
# the Module types this Library supports.
#
if IsLibraryModule:
@@ -591,10 +591,10 @@ def ValidateMS2(Module, TopXmlTreeLevel):
RaiseError=True)
#
- # If the module is not a library module, the MODULE_TYPE listed in the ModuleSurfaceArea.Header must match the
- # SupModList attribute. If these conditions cannot be met, the tool must exit gracefully, informing the user
+ # If the module is not a library module, the MODULE_TYPE listed in the ModuleSurfaceArea.Header must match the
+ # SupModList attribute. If these conditions cannot be met, the tool must exit gracefully, informing the user
# that the EDK II Build system does not currently support the features required by this Module.
- #
+ #
if not IsLibraryModule:
for Item in Module.GetExternList():
if hasattr(Item, 'SupModList') and len(Item.SupModList) > 0 and \
@@ -628,7 +628,7 @@ def ValidateMS2(Module, TopXmlTreeLevel):
# Check if any required item is missing in ModuleSurfaceArea
#
# @param Module: The ModuleSurfaceArea to be checked
-# @param XmlTreeLevel: The top level of Module
+# @param XmlTreeLevel: The top level of Module
#
def ValidateMS3(Module, TopXmlTreeLevel):
#
@@ -666,12 +666,12 @@ def ValidateMS3(Module, TopXmlTreeLevel):
for AsBuilt in Item.GetAsBuiltList():
#
# Check LibInstance
- #
+ #
if len(AsBuilt.LibraryInstancesList) == 1 and not AsBuilt.LibraryInstancesList[0]:
CheckDict = {'GUID':''}
XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt', 'LibraryInstances']
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
-
+
for LibItem in AsBuilt.LibraryInstancesList:
CheckDict = {'Guid':LibItem.Guid,
'Version':LibItem.Version}
@@ -925,7 +925,7 @@ def ValidatePS2(Package):
#
# Check if any required item is missing in PackageSurfaceArea
#
-# @param Package: The PackageSurfaceArea to be checked
+# @param Package: The PackageSurfaceArea to be checked
#
def ValidatePackageSurfaceArea(Package):
ValidatePS1(Package)
diff --git a/BaseTools/Source/Python/UPT/Xml/XmlParserMisc.py b/BaseTools/Source/Python/UPT/Xml/XmlParserMisc.py
index 7e3dc94edf..d170761aad 100644
--- a/BaseTools/Source/Python/UPT/Xml/XmlParserMisc.py
+++ b/BaseTools/Source/Python/UPT/Xml/XmlParserMisc.py
@@ -1,11 +1,11 @@
## @file
# This file is used to parse a xml file of .PKG file
#
-# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -21,7 +21,7 @@ from Logger.ToolError import PARSER_ERROR
import Logger.Log as Logger
## ConvertVariableName()
-# Convert VariableName to be L"string",
+# Convert VariableName to be L"string",
# input of UCS-2 format Hex Array or L"string" (C style.) could be converted successfully,
# others will not.
#
@@ -31,11 +31,11 @@ import Logger.Log as Logger
def ConvertVariableName(VariableName):
VariableName = VariableName.strip()
#
- # check for L quoted string
+ # check for L quoted string
#
if VariableName.startswith('L"') and VariableName.endswith('"'):
return VariableName
-
+
#
# check for Hex Array, it should be little endian even number of hex numbers
#
@@ -52,7 +52,7 @@ def ConvertVariableName(VariableName):
SecondByte = int(ValueList[Index + 1], 16)
if SecondByte != 0:
return None
-
+
if FirstByte not in xrange(0x20, 0x7F):
return None
TransferedStr += ('%c')%FirstByte
@@ -63,10 +63,10 @@ def ConvertVariableName(VariableName):
## IsRequiredItemListNull
#
# Check if a required XML section item/attribue is NULL
-#
+#
# @param ItemList: The list of items to be checked
# @param XmlTreeLevel: The error message tree level
-#
+#
def IsRequiredItemListNull(ItemDict, XmlTreeLevel):
for Key in ItemDict:
if not ItemDict[Key]:
@@ -74,7 +74,7 @@ def IsRequiredItemListNull(ItemDict, XmlTreeLevel):
ErrorMsg = ERR_XML_PARSER_REQUIRED_ITEM_MISSING % (Key, Msg)
Logger.Error('\nUPT', PARSER_ERROR, ErrorMsg, RaiseError=True)
-## Get help text
+## Get help text
#
# @param HelpText
#
@@ -86,8 +86,8 @@ def GetHelpTextList(HelpText):
HelpTextObj.SetString(HelT.HelpText)
HelpTextList.append(HelpTextObj)
return HelpTextList
-
-## Get Prompt text
+
+## Get Prompt text
#
# @param Prompt
#
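The ConvertVariableName() hunks above only reflow comments, but those comments describe a small conversion: a C-style L"string" is accepted as-is, while a little-endian UCS-2 hex array of printable ASCII bytes is rebuilt into one. A minimal Python sketch of that idea follows (illustrative only, not the UPT implementation; the brace and comma handling is an assumption):

    # Sketch of the conversion described in the comments above, e.g.
    # "0x42, 0x00, 0x6F, 0x00" -> L"Bo"; L"..." input passes through unchanged.
    def convert_variable_name(name):
        name = name.strip()
        if name.startswith('L"') and name.endswith('"'):
            return name                      # already a C-style L"string"
        values = [v.strip() for v in name.strip('{} ').split(',') if v.strip()]
        if not values or len(values) % 2 != 0:
            return None                      # UCS-2 needs an even number of bytes
        chars = []
        for low, high in zip(values[0::2], values[1::2]):
            low, high = int(low, 16), int(high, 16)
            if high != 0 or not (0x20 <= low < 0x7F):
                return None                  # only printable ASCII code points
            chars.append(chr(low))
        return 'L"%s"' % ''.join(chars)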
diff --git a/BaseTools/Source/Python/UPT/Xml/__init__.py b/BaseTools/Source/Python/UPT/Xml/__init__.py
index f09eece5fb..e8283c03cb 100644
--- a/BaseTools/Source/Python/UPT/Xml/__init__.py
+++ b/BaseTools/Source/Python/UPT/Xml/__init__.py
@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
-# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
-# This program and the accompanying materials are licensed and made available
-# under the terms and conditions of the BSD License which accompanies this
-# distribution. The full text of the license may be found at
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
Xml
-''' \ No newline at end of file
+'''
diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py
index f35778d18a..0aa72d8302 100644
--- a/BaseTools/Source/Python/Workspace/MetaFileParser.py
+++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py
@@ -369,10 +369,10 @@ class MetaFileParser(object):
EdkLogger.error("Parser", FORMAT_INVALID, "Global macro %s is not permitted." % (Macro), ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
else:
EdkLogger.error("Parser", FORMAT_INVALID, "%s not defined" % (Macro), ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- # Sometimes, we need to make differences between EDK and EDK2 modules
+ # Sometimes, we need to make differences between EDK and EDK2 modules
if Name == 'INF_VERSION':
if hexVersionPattern.match(Value):
- self._Version = int(Value, 0)
+ self._Version = int(Value, 0)
elif decVersionPattern.match(Value):
ValueList = Value.split('.')
Major = '%04o' % int(ValueList[0], 0)
@@ -418,7 +418,7 @@ class MetaFileParser(object):
Macros.update(self._GetApplicableSectionMacro())
return Macros
- ## Construct section Macro dict
+ ## Construct section Macro dict
def _ConstructSectionMacroDict(self, Name, Value):
ScopeKey = [(Scope[0], Scope[1], Scope[2]) for Scope in self._Scope]
ScopeKey = tuple(ScopeKey)
@@ -433,7 +433,7 @@ class MetaFileParser(object):
self._SectionsMacroDict[SectionDictKey][Name] = Value
- ## Get section Macros that are applicable to current line, which may come from other sections
+ ## Get section Macros that are applicable to current line, which may come from other sections
## that share the same name while scope is wider
def _GetApplicableSectionMacro(self):
Macros = {}
@@ -1351,7 +1351,7 @@ class DscParser(MetaFileParser):
try:
Processer[self._ItemType]()
except EvaluationException as Excpt:
- #
+ #
# Only catch expression evaluation error here. We need to report
# the precise number of line on which the error occurred
#
@@ -1478,7 +1478,7 @@ class DscParser(MetaFileParser):
EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc), self._ValueList[1])
Result = False
except WrnExpression as Excpt:
- #
+ #
# Catch expression evaluation warning here. We need to report
# the precise number of line and return the evaluation result
#
@@ -1535,7 +1535,7 @@ class DscParser(MetaFileParser):
__IncludeMacros['EFI_SOURCE'] = GlobalData.gGlobalDefines['EFI_SOURCE']
__IncludeMacros['EDK_SOURCE'] = GlobalData.gGlobalDefines['EDK_SOURCE']
#
- # Allow using MACROs comes from [Defines] section to keep compatible.
+ # Allow using MACROs comes from [Defines] section to keep compatible.
#
__IncludeMacros.update(self._Macros)
diff --git a/BaseTools/Source/Python/Workspace/MetaFileTable.py b/BaseTools/Source/Python/Workspace/MetaFileTable.py
index f528c1ee66..ee1c7fffca 100644
--- a/BaseTools/Source/Python/Workspace/MetaFileTable.py
+++ b/BaseTools/Source/Python/Workspace/MetaFileTable.py
@@ -56,7 +56,7 @@ class MetaFileTable(Table):
Result = self.Cur.execute("select ID from %s where ID<0" % (self.Table)).fetchall()
if not Result:
# update the timestamp in database
- self._FileIndexTable.SetFileTimeStamp(self.IdBase, TimeStamp)
+ self._FileIndexTable.SetFileTimeStamp(self.IdBase, TimeStamp)
return False
if TimeStamp != self._FileIndexTable.GetFileTimeStamp(self.IdBase):
@@ -113,28 +113,28 @@ class ModuleTable(MetaFileTable):
BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
(Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
return Table.Insert(
- self,
- Model,
- Value1,
- Value2,
- Value3,
- Scope1,
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
Scope2,
- BelongsToItem,
- StartLine,
- StartColumn,
- EndLine,
- EndColumn,
+ BelongsToItem,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
Enabled
)
## Query table
#
- # @param Model: The Model of Record
- # @param Arch: The Arch attribute of Record
- # @param Platform The Platform attribute of Record
+ # @param Model: The Model of Record
+ # @param Arch: The Arch attribute of Record
+ # @param Platform The Platform attribute of Record
#
- # @retval: A recordSet of all found records
+ # @retval: A recordSet of all found records
#
def Query(self, Model, Arch=None, Platform=None, BelongsToItem=None):
ConditionString = "Model=%s AND Enabled>=0" % Model
@@ -195,27 +195,27 @@ class PackageTable(MetaFileTable):
BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
(Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
return Table.Insert(
- self,
- Model,
- Value1,
- Value2,
- Value3,
- Scope1,
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
Scope2,
- BelongsToItem,
- StartLine,
- StartColumn,
- EndLine,
- EndColumn,
+ BelongsToItem,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
Enabled
)
## Query table
#
- # @param Model: The Model of Record
- # @param Arch: The Arch attribute of Record
+ # @param Model: The Model of Record
+ # @param Arch: The Arch attribute of Record
#
- # @retval: A recordSet of all found records
+ # @retval: A recordSet of all found records
#
def Query(self, Model, Arch=None):
ConditionString = "Model=%s AND Enabled>=0" % Model
@@ -236,7 +236,7 @@ class PackageTable(MetaFileTable):
try:
for row in self.Cur:
comment = row[0]
-
+
LineNum = row[1]
comment = comment.strip("#")
comment = comment.strip()
@@ -310,32 +310,32 @@ class PlatformTable(MetaFileTable):
FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):
(Value1, Value2, Value3, Scope1, Scope2, Scope3) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2, Scope3))
return Table.Insert(
- self,
- Model,
- Value1,
- Value2,
- Value3,
- Scope1,
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
Scope2,
Scope3,
- BelongsToItem,
+ BelongsToItem,
FromItem,
- StartLine,
- StartColumn,
- EndLine,
- EndColumn,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
Enabled
)
## Query table
#
- # @param Model: The Model of Record
+ # @param Model: The Model of Record
# @param Scope1: Arch of a Dsc item
# @param Scope2: Module type of a Dsc item
# @param BelongsToItem: The item belongs to which another item
# @param FromItem: The item belongs to which dsc file
#
- # @retval: A recordSet of all found records
+ # @retval: A recordSet of all found records
#
def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None):
ConditionString = "Model=%s AND Enabled>0" % Model
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
index 9e055fcfc4..a40ab8fc8c 100644
--- a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
+++ b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
@@ -114,8 +114,8 @@ class WorkspaceDatabase(object):
# get the parser ready for this file
MetaFile = self._FILE_PARSER_[FileType](
- FilePath,
- FileType,
+ FilePath,
+ FileType,
Arch,
MetaFileStorage(self.WorkspaceDb.Cur, FilePath, FileType)
)
@@ -162,7 +162,7 @@ class WorkspaceDatabase(object):
# remove db file in case inconsistency between db and file in file system
if self._CheckWhetherDbNeedRenew(RenewDb, DbPath):
os.remove(DbPath)
-
+
# create db with optimized parameters
self.Conn = sqlite3.connect(DbPath, isolation_level='DEFERRED')
self.Conn.execute("PRAGMA synchronous=OFF")
@@ -199,11 +199,11 @@ class WorkspaceDatabase(object):
def _CheckWhetherDbNeedRenew (self, force, DbPath):
# if database does not exist, we need do nothing
if not os.path.exists(DbPath): return False
-
+
# if user force to renew database, then not check whether database is out of date
if force: return True
-
- #
+
+ #
# Check the time of last modified source file or build.exe
# if is newer than time of database, then database need to be re-created.
#
@@ -217,15 +217,15 @@ class WorkspaceDatabase(object):
if rootPath == "" or rootPath is None:
EdkLogger.verbose("\nFail to find the root path of build.exe or python sources, so can not \
determine whether database file is out of date!\n")
-
+
# walk the root path of source or build's binary to get the time last modified.
-
+
for root, dirs, files in os.walk (rootPath):
for dir in dirs:
- # bypass source control folder
+ # bypass source control folder
if dir.lower() in [".svn", "_svn", "cvs"]:
dirs.remove(dir)
-
+
for file in files:
ext = os.path.splitext(file)[1]
if ext.lower() == ".py": # only check .py files
@@ -235,9 +235,9 @@ determine whether database file is out of date!\n")
if timeOfToolModified > os.stat(DbPath).st_mtime:
EdkLogger.verbose("\nWorkspace database is out of data!")
return True
-
+
return False
-
+
## Initialize build database
def InitDatabase(self):
EdkLogger.verbose("\nInitialize build database started ...")
diff --git a/BaseTools/Source/Python/build/BuildReport.py b/BaseTools/Source/Python/build/BuildReport.py
index 897167cd11..1555ec3c21 100644
--- a/BaseTools/Source/Python/build/BuildReport.py
+++ b/BaseTools/Source/Python/build/BuildReport.py
@@ -220,7 +220,7 @@ def FindIncludeFiles(Source, IncludePathList, IncludeFiles):
## Split each lines in file
#
-# This method is used to split the lines in file to make the length of each line
+# This method is used to split the lines in file to make the length of each line
# less than MaxLength.
#
# @param Content The content of file
@@ -245,12 +245,12 @@ def FileLinesSplit(Content=None, MaxLength=None):
NewContentList.append(Line)
for NewLine in NewContentList:
NewContent += NewLine + TAB_LINE_BREAK
-
+
NewContent = NewContent.replace(TAB_LINE_BREAK, gEndOfLine).replace('\r\r\n', gEndOfLine)
return NewContent
-
-
-
+
+
+
##
# Parse binary dependency expression section
#
@@ -280,10 +280,10 @@ class DepexParser(object):
for Guid in Package.Guids:
GuidValue = GuidStructureStringToGuidString(Package.Guids[Guid])
self._GuidDb[GuidValue.upper()] = Guid
-
+
##
# Parse the binary dependency expression files.
- #
+ #
# This function parses the binary dependency expression file and translate it
# to the instruction list.
#
@@ -305,7 +305,7 @@ class DepexParser(object):
OpCode = DepexFile.read(1)
return DepexStatement
-
+
##
# Reports library information
#
@@ -411,7 +411,7 @@ class DepexReport(object):
if ModuleType in [SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_DXE_CORE, SUP_MODULE_SMM_CORE, SUP_MODULE_MM_CORE_STANDALONE, SUP_MODULE_UEFI_APPLICATION]:
return
-
+
for Source in M.SourceFileList:
if os.path.splitext(Source.Path)[1].lower() == ".dxs":
Match = gDxsDependencyPattern.search(open(Source.Path).read())
@@ -457,7 +457,7 @@ class DepexReport(object):
FileWrite(File, gSubSectionSep)
except:
EdkLogger.warn(None, "Dependency expression file is corrupted", self._DepexFileName)
-
+
FileWrite(File, "Dependency Expression (DEPEX) from %s" % self.Source)
if self.Source == "INF":
@@ -1548,7 +1548,7 @@ class PredictionReport(object):
EotEndTime = time.time()
EotDuration = time.strftime("%H:%M:%S", time.gmtime(int(round(EotEndTime - EotStartTime))))
EdkLogger.quiet("EOT run time: %s\n" % EotDuration)
-
+
#
# Parse the output of EOT tool
#
@@ -1746,7 +1746,7 @@ class FdRegionReport(object):
PlatformPcds = {}
#
# Collect PCDs declared in DEC files.
- #
+ #
for Pa in Wa.AutoGenObjectList:
for Package in Pa.PackageList:
for (TokenCName, TokenSpaceGuidCName, DecType) in Package.Pcds:
@@ -2026,7 +2026,7 @@ class PlatformReport(object):
self.DepexParser = None
if "DEPEX" in ReportType:
self.DepexParser = DepexParser(Wa)
-
+
self.ModuleReportList = []
if MaList is not None:
self._IsModuleBuild = True
@@ -2101,7 +2101,7 @@ class PlatformReport(object):
if not self._IsModuleBuild:
if "PCD" in ReportType:
self.PcdReport.GenerateReport(File, None)
-
+
if "FLASH" in ReportType:
for FdReportListItem in self.FdReportList:
FdReportListItem.GenerateReport(File)
@@ -2135,7 +2135,7 @@ class BuildReport(object):
if ReportFile:
self.ReportList = []
self.ReportType = []
- if ReportType:
+ if ReportType:
for ReportTypeItem in ReportType:
if ReportTypeItem not in self.ReportType:
self.ReportType.append(ReportTypeItem)
@@ -2181,7 +2181,7 @@ class BuildReport(object):
EdkLogger.error("BuildReport", CODE_ERROR, "Unknown fatal error when generating build report", ExtraData=self.ReportFile, RaiseError=False)
EdkLogger.quiet("(Python %s on %s\n%s)" % (platform.python_version(), sys.platform, traceback.format_exc()))
File.close()
-
+
# This acts like the main() function for the script, unless it is 'import'ed into another script.
if __name__ == '__main__':
pass
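FileLinesSplit(), whose doc comment is reflowed above, wraps over-long report lines at MaxLength before line endings are normalized. A simplified sketch of the wrapping step (hypothetical helper; splitting purely by character count is an assumption):

    def file_lines_split(content, max_length):
        # Break each line into chunks no longer than max_length; the caller
        # is assumed to normalize line endings afterwards.
        out = []
        for line in content.splitlines():
            if len(line) <= max_length:
                out.append(line)
                continue
            out.extend(line[i:i + max_length] for i in range(0, len(line), max_length))
        return "\n".join(out)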
diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py
index 08e81016de..6ad0e38968 100644
--- a/BaseTools/Source/Python/build/build.py
+++ b/BaseTools/Source/Python/build/build.py
@@ -59,7 +59,7 @@ from collections import OrderedDict, defaultdict
# Version and Copyright
VersionNumber = "0.60" + ' ' + gBUILD_VERSION
__version__ = "%prog Version " + VersionNumber
-__copyright__ = "Copyright (c) 2007 - 2017, Intel Corporation All rights reserved."
+__copyright__ = "Copyright (c) 2007 - 2018, Intel Corporation All rights reserved."
## standard targets of build command
gSupportedTarget = ['all', 'genc', 'genmake', 'modules', 'libraries', 'fds', 'clean', 'cleanall', 'cleanlib', 'run']
@@ -111,7 +111,7 @@ def CheckEnvVariable():
EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in WORKSPACE path",
ExtraData=WorkspaceDir)
os.environ["WORKSPACE"] = WorkspaceDir
-
+
# set multiple workspace
PackagesPath = os.getenv("PACKAGES_PATH")
mws.setWs(WorkspaceDir, PackagesPath)
@@ -201,7 +201,7 @@ def CheckEnvVariable():
GlobalData.gGlobalDefines["EDK_SOURCE"] = EdkSourceDir
GlobalData.gGlobalDefines["ECP_SOURCE"] = EcpSourceDir
GlobalData.gGlobalDefines["EDK_TOOLS_PATH"] = os.environ["EDK_TOOLS_PATH"]
-
+
## Get normalized file path
#
# Convert the path to be local format, and remove the WORKSPACE path at the
@@ -266,7 +266,7 @@ def LaunchCommand(Command, WorkingDir):
# if working directory doesn't exist, Popen() will raise an exception
if not os.path.isdir(WorkingDir):
EdkLogger.error("build", FILE_NOT_FOUND, ExtraData=WorkingDir)
-
+
# Command is used as the first Argument in following Popen().
# It could be a string or sequence. We find that if command is a string in following Popen(),
# ubuntu may fail with an error message that the command is not found.
@@ -849,14 +849,14 @@ class Build():
# print current build environment and configuration
EdkLogger.quiet("%-16s = %s" % ("WORKSPACE", os.environ["WORKSPACE"]))
if "PACKAGES_PATH" in os.environ:
- # WORKSPACE env has been converted before. Print the same path style with WORKSPACE env.
+ # WORKSPACE env has been converted before. Print the same path style with WORKSPACE env.
EdkLogger.quiet("%-16s = %s" % ("PACKAGES_PATH", os.path.normcase(os.path.normpath(os.environ["PACKAGES_PATH"]))))
EdkLogger.quiet("%-16s = %s" % ("ECP_SOURCE", os.environ["ECP_SOURCE"]))
EdkLogger.quiet("%-16s = %s" % ("EDK_SOURCE", os.environ["EDK_SOURCE"]))
EdkLogger.quiet("%-16s = %s" % ("EFI_SOURCE", os.environ["EFI_SOURCE"]))
EdkLogger.quiet("%-16s = %s" % ("EDK_TOOLS_PATH", os.environ["EDK_TOOLS_PATH"]))
if "EDK_TOOLS_BIN" in os.environ:
- # Print the same path style with WORKSPACE env.
+ # Print the same path style with WORKSPACE env.
EdkLogger.quiet("%-16s = %s" % ("EDK_TOOLS_BIN", os.path.normcase(os.path.normpath(os.environ["EDK_TOOLS_BIN"]))))
EdkLogger.quiet("%-16s = %s" % ("CONF_PATH", GlobalData.gConfDirectory))
self.InitPreBuild()
@@ -1962,7 +1962,7 @@ class Build():
self._SaveMapFile (MapBuffer, Wa)
def _GenFfsCmd(self):
- # convert dictionary of Cmd:(Inf,Arch)
+ # convert dictionary of Cmd:(Inf,Arch)
# to a new dictionary of (Inf,Arch):Cmd,Cmd,Cmd...
CmdSetDict = defaultdict(set)
GenFfsDict = GenFds.GenFfsMakefile('', GlobalData.gFdfParser, self, self.ArchList, GlobalData)
@@ -2034,7 +2034,7 @@ class Build():
for Module in ModuleList:
# Get ModuleAutoGen object to generate C code file and makefile
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile)
-
+
if Ma is None:
continue
if Ma.CanSkipbyHash():
diff --git a/BaseTools/Source/Python/sitecustomize.py b/BaseTools/Source/Python/sitecustomize.py
index 4ea84c5129..3afa90700e 100644
--- a/BaseTools/Source/Python/sitecustomize.py
+++ b/BaseTools/Source/Python/sitecustomize.py
@@ -16,6 +16,6 @@ import locale
if sys.platform == "darwin":
DefaultLocal = locale.getdefaultlocale()[1]
if DefaultLocal is None:
- DefaultLocal = 'UTF8'
+ DefaultLocal = 'UTF8'
sys.setdefaultencoding(DefaultLocal)