Diffstat (limited to 'BaseTools/Source/Python')
-rw-r--r--  BaseTools/Source/Python/AutoGen/AutoGen.py | 38
-rw-r--r--  BaseTools/Source/Python/AutoGen/BuildEngine.py | 12
-rw-r--r--  BaseTools/Source/Python/AutoGen/GenMake.py | 4
-rw-r--r--  BaseTools/Source/Python/AutoGen/GenPcdDb.py | 6
-rw-r--r--  BaseTools/Source/Python/AutoGen/InfSectionParser.py | 10
-rw-r--r--  BaseTools/Source/Python/AutoGen/UniClassObject.py | 4
-rw-r--r--  BaseTools/Source/Python/Common/BuildToolError.py | 2
-rw-r--r--  BaseTools/Source/Python/Common/EdkLogger.py | 6
-rw-r--r--  BaseTools/Source/Python/Common/Misc.py | 20
-rw-r--r--  BaseTools/Source/Python/Common/MultipleWorkspace.py | 4
-rw-r--r--  BaseTools/Source/Python/Common/Parsing.py | 6
-rw-r--r--  BaseTools/Source/Python/Common/RangeExpression.py | 4
-rw-r--r--  BaseTools/Source/Python/Common/StringUtils.py | 14
-rw-r--r--  BaseTools/Source/Python/CommonDataClass/CommonClass.py | 2
-rw-r--r--  BaseTools/Source/Python/CommonDataClass/DataClass.py | 4
-rw-r--r--  BaseTools/Source/Python/Ecc/C.g | 8
-rw-r--r--  BaseTools/Source/Python/Ecc/CParser3/CLexer.py | 2
-rw-r--r--  BaseTools/Source/Python/Ecc/CParser3/CParser.py | 2
-rw-r--r--  BaseTools/Source/Python/Ecc/Check.py | 14
-rw-r--r--  BaseTools/Source/Python/Ecc/Database.py | 8
-rw-r--r--  BaseTools/Source/Python/Ecc/EccMain.py | 4
-rw-r--r--  BaseTools/Source/Python/Ecc/EccToolError.py | 4
-rw-r--r--  BaseTools/Source/Python/Ecc/MetaDataParser.py | 2
-rw-r--r--  BaseTools/Source/Python/Ecc/c.py | 8
-rw-r--r--  BaseTools/Source/Python/Eot/CParser3/CLexer.py | 2
-rw-r--r--  BaseTools/Source/Python/Eot/CParser3/CParser.py | 2
-rw-r--r--  BaseTools/Source/Python/Eot/CodeFragmentCollector.py | 6
-rw-r--r--  BaseTools/Source/Python/Eot/Database.py | 6
-rw-r--r--  BaseTools/Source/Python/Eot/EotGlobalData.py | 2
-rw-r--r--  BaseTools/Source/Python/Eot/EotMain.py | 2
-rw-r--r--  BaseTools/Source/Python/Eot/InfParserLite.py | 4
-rw-r--r--  BaseTools/Source/Python/Eot/Parser.py | 4
-rw-r--r--  BaseTools/Source/Python/Eot/Report.py | 2
-rw-r--r--  BaseTools/Source/Python/GenFds/FdfParser.py | 30
-rw-r--r--  BaseTools/Source/Python/GenFds/FfsInfStatement.py | 4
-rw-r--r--  BaseTools/Source/Python/GenFds/Fv.py | 2
-rw-r--r--  BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py | 2
-rw-r--r--  BaseTools/Source/Python/Table/Table.py | 2
-rw-r--r--  BaseTools/Source/Python/Table/TableFunction.py | 4
-rw-r--r--  BaseTools/Source/Python/Table/TableQuery.py | 2
-rw-r--r--  BaseTools/Source/Python/TargetTool/TargetTool.py | 4
-rw-r--r--  BaseTools/Source/Python/Trim/Trim.py | 2
-rw-r--r--  BaseTools/Source/Python/UPT/Core/DependencyRules.py | 2
-rw-r--r--  BaseTools/Source/Python/UPT/Core/IpiDb.py | 16
-rw-r--r--  BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py | 4
-rw-r--r--  BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py | 4
-rw-r--r--  BaseTools/Source/Python/UPT/InstallPkg.py | 20
-rw-r--r--  BaseTools/Source/Python/UPT/Library/CommentParsing.py | 14
-rw-r--r--  BaseTools/Source/Python/UPT/Library/DataType.py | 2
-rw-r--r--  BaseTools/Source/Python/UPT/Library/GlobalData.py | 2
-rw-r--r--  BaseTools/Source/Python/UPT/Library/Misc.py | 8
-rw-r--r--  BaseTools/Source/Python/UPT/Library/ParserValidate.py | 14
-rw-r--r--  BaseTools/Source/Python/UPT/Library/Parsing.py | 2
-rw-r--r--  BaseTools/Source/Python/UPT/Library/StringUtils.py | 12
-rw-r--r--  BaseTools/Source/Python/UPT/Library/UniClassObject.py | 20
-rw-r--r--  BaseTools/Source/Python/UPT/Logger/Log.py | 6
-rw-r--r--  BaseTools/Source/Python/UPT/Logger/StringTable.py | 12
-rw-r--r--  BaseTools/Source/Python/UPT/Logger/ToolError.py | 2
-rw-r--r--  BaseTools/Source/Python/UPT/MkPkg.py | 2
-rw-r--r--  BaseTools/Source/Python/UPT/Object/Parser/DecObject.py | 4
-rw-r--r--  BaseTools/Source/Python/UPT/Parser/DecParser.py | 4
-rw-r--r--  BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py | 4
-rw-r--r--  BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py | 4
-rw-r--r--  BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py | 2
-rw-r--r--  BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py | 4
-rw-r--r--  BaseTools/Source/Python/UPT/RmPkg.py | 4
-rw-r--r--  BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py | 6
-rw-r--r--  BaseTools/Source/Python/Workspace/BuildClassObject.py | 2
-rw-r--r--  BaseTools/Source/Python/Workspace/DecBuildData.py | 8
-rw-r--r--  BaseTools/Source/Python/Workspace/DscBuildData.py | 2
-rw-r--r--  BaseTools/Source/Python/Workspace/InfBuildData.py | 4
-rw-r--r--  BaseTools/Source/Python/Workspace/MetaFileParser.py | 2
-rw-r--r--  BaseTools/Source/Python/Workspace/WorkspaceCommon.py | 2
-rw-r--r--  BaseTools/Source/Python/Workspace/WorkspaceDatabase.py | 8
-rw-r--r--  BaseTools/Source/Python/build/build.py | 8
75 files changed, 240 insertions, 240 deletions
diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py
index 12592a2a46..2452ecbcba 100644
--- a/BaseTools/Source/Python/AutoGen/AutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -1187,7 +1187,7 @@ class PlatformAutoGen(AutoGen):
#
@cached_class_function
def CreateCodeFile(self, CreateModuleCodeFile=False):
- # only module has code to be greated, so do nothing if CreateModuleCodeFile is False
+ # only module has code to be created, so do nothing if CreateModuleCodeFile is False
if not CreateModuleCodeFile:
return
@@ -1849,7 +1849,7 @@ class PlatformAutoGen(AutoGen):
## Get tool chain definition
#
- # Get each tool defition for given tool chain from tools_def.txt and platform
+ # Get each tool definition for given tool chain from tools_def.txt and platform
#
@cached_property
def ToolDefinition(self):
@@ -2130,8 +2130,8 @@ class PlatformAutoGen(AutoGen):
## Override PCD setting (type, value, ...)
#
- # @param ToPcd The PCD to be overrided
- # @param FromPcd The PCD overrideing from
+ # @param ToPcd The PCD to be overridden
+ # @param FromPcd The PCD overriding from
#
def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):
#
@@ -2218,7 +2218,7 @@ class PlatformAutoGen(AutoGen):
## Apply PCD setting defined platform to a module
#
- # @param Module The module from which the PCD setting will be overrided
+ # @param Module The module from which the PCD setting will be overridden
#
# @retval PCD_list The list PCDs with settings from platform
#
@@ -2450,7 +2450,7 @@ class PlatformAutoGen(AutoGen):
## Append build options in platform to a module
#
- # @param Module The module to which the build options will be appened
+ # @param Module The module to which the build options will be appended
#
# @retval options The options appended with build options in platform
#
@@ -2560,7 +2560,7 @@ class ModuleAutoGen(AutoGen):
self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)
self.SourceOverrideDir = None
- # use overrided path defined in DSC file
+ # use overridden path defined in DSC file
if self.MetaFile.Key in GlobalData.gOverrideDir:
self.SourceOverrideDir = GlobalData.gOverrideDir[self.MetaFile.Key]
@@ -2685,7 +2685,7 @@ class ModuleAutoGen(AutoGen):
def Guid(self):
#
# To build same module more than once, the module path with FILE_GUID overridden has
- # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the realy path
+ # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
# in DSC. The overridden GUID can be retrieved from file name
#
if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
@@ -2745,7 +2745,7 @@ class ModuleAutoGen(AutoGen):
self.MetaFile.BaseName
))
- ## Return the directory to store the intermediate object files of the mdoule
+ ## Return the directory to store the intermediate object files of the module
@cached_property
def OutputDir(self):
return _MakeDir((self.BuildDir, "OUTPUT"))
@@ -2757,7 +2757,7 @@ class ModuleAutoGen(AutoGen):
return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
return ''
- ## Return the directory to store auto-gened source files of the mdoule
+ ## Return the directory to store auto-gened source files of the module
@cached_property
def DebugDir(self):
return _MakeDir((self.BuildDir, "DEBUG"))
@@ -2809,7 +2809,7 @@ class ModuleAutoGen(AutoGen):
## Get the depex string
#
- # @return : a string contain all depex expresion.
+ # @return : a string contain all depex expression.
def _GetDepexExpresionString(self):
DepexStr = ''
DepexList = []
@@ -2819,11 +2819,11 @@ class ModuleAutoGen(AutoGen):
for M in [self.Module] + self.DependentLibraryList:
Filename = M.MetaFile.Path
InfObj = InfSectionParser.InfSectionParser(Filename)
- DepexExpresionList = InfObj.GetDepexExpresionList()
- for DepexExpresion in DepexExpresionList:
- for key in DepexExpresion:
+ DepexExpressionList = InfObj.GetDepexExpresionList()
+ for DepexExpression in DepexExpressionList:
+ for key in DepexExpression:
Arch, ModuleType = key
- DepexExpr = [x for x in DepexExpresion[key] if not str(x).startswith('#')]
+ DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
# the type of build module is USER_DEFINED.
# All different DEPEX section tags would be copied into the As Built INF file
# and there would be separate DEPEX section tags
@@ -2878,7 +2878,7 @@ class ModuleAutoGen(AutoGen):
DepexList = []
#
- # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
+ # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
#
for M in [self.Module] + self.DependentLibraryList:
Inherited = False
@@ -3323,7 +3323,7 @@ class ModuleAutoGen(AutoGen):
IdfGenBinBuffer.close()
return RetVal
- ## Return the list of library modules explicitly or implicityly used by this module
+ ## Return the list of library modules explicitly or implicitly used by this module
@cached_property
def DependentLibraryList(self):
# only merge library classes and PCD for non-library module
@@ -3894,8 +3894,8 @@ class ModuleAutoGen(AutoGen):
AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr
# Generated depex expression section in comments.
- DepexExpresion = self._GetDepexExpresionString()
- AsBuiltInfDict['depexsection_item'] = DepexExpresion if DepexExpresion else ''
+ DepexExpression = self._GetDepexExpresionString()
+ AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''
AsBuiltInf = TemplateString()
AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))
diff --git a/BaseTools/Source/Python/AutoGen/BuildEngine.py b/BaseTools/Source/Python/AutoGen/BuildEngine.py
index f9e3ac3a1d..2cea97ae10 100644
--- a/BaseTools/Source/Python/AutoGen/BuildEngine.py
+++ b/BaseTools/Source/Python/AutoGen/BuildEngine.py
@@ -92,8 +92,8 @@ class FileBuildRule:
## constructor
#
- # @param Input The dictionary represeting input file(s) for a rule
- # @param Output The list represeting output file(s) for a rule
+ # @param Input The dictionary representing input file(s) for a rule
+ # @param Output The list representing output file(s) for a rule
# @param Command The list containing commands to generate the output from input
#
def __init__(self, Type, Input, Output, Command, ExtraDependency=None):
@@ -193,7 +193,7 @@ class FileBuildRule:
# @param RelativeToDir The relative path of the source file
# @param PathSeparator Path separator
#
- # @retval tuple (Source file in full path, List of individual sourcefiles, Destionation file, List of build commands)
+ # @retval tuple (Source file in full path, List of individual sourcefiles, Destination file, List of build commands)
#
def Apply(self, SourceFile, BuildRuleOrder=None):
if not self.CommandList or not self.DestFileList:
@@ -396,7 +396,7 @@ class BuildRule:
# @param LineIndex The line index of build rule text
#
def ParseSubSection(self, LineIndex):
- # currenly nothing here
+ # currently nothing here
pass
## Placeholder for not supported sections
@@ -409,7 +409,7 @@ class BuildRule:
## Merge section information just got into rule database
def EndOfSection(self):
Database = self.RuleDatabase
- # if there's specific toochain family, 'COMMON' doesn't make sense any more
+ # if there's specific toolchain family, 'COMMON' doesn't make sense any more
if len(self._TotalToolChainFamilySet) > 1 and TAB_COMMON in self._TotalToolChainFamilySet:
self._TotalToolChainFamilySet.remove(TAB_COMMON)
for Family in self._TotalToolChainFamilySet:
@@ -554,7 +554,7 @@ class BuildRule:
# @param FileExt The extension of a file
# @param ToolChainFamily The tool chain family name
# @param BuildVersion The build version number. TAB_STAR means any rule
- # is applicalbe.
+ # is applicable.
#
# @retval FileType The file type string
# @retval FileBuildRule The object of FileBuildRule
diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py
index dc4cd688f4..53c5b8577d 100644
--- a/BaseTools/Source/Python/AutoGen/GenMake.py
+++ b/BaseTools/Source/Python/AutoGen/GenMake.py
@@ -959,7 +959,7 @@ cleanlib:
NewFile = self.PlaceMacro(str(F), self.Macros)
# In order to use file list macro as dependency
if T.GenListFile:
- # gnu tools need forward slash path separater, even on Windows
+ # gnu tools need forward slash path separator, even on Windows
self.ListFileMacros[T.ListFileMacro].append(str(F).replace ('\\', '/'))
self.FileListMacros[T.FileListMacro].append(NewFile)
elif T.GenFileListMacro:
@@ -1003,7 +1003,7 @@ cleanlib:
## Find dependencies for one source file
#
# By searching recursively "#include" directive in file, find out all the
- # files needed by given source file. The dependecies will be only searched
+ # files needed by given source file. The dependencies will be only searched
# in given search path list.
#
# @param File The source file
diff --git a/BaseTools/Source/Python/AutoGen/GenPcdDb.py b/BaseTools/Source/Python/AutoGen/GenPcdDb.py
index cbf7a39dd5..b3f4ccf4f4 100644
--- a/BaseTools/Source/Python/AutoGen/GenPcdDb.py
+++ b/BaseTools/Source/Python/AutoGen/GenPcdDb.py
@@ -342,7 +342,7 @@ class DbComItemList (DbItemList):
Offset = 0
if self.ItemSize == 0:
#
- # Variable length, need to calculte one by one
+ # Variable length, need to calculate one by one
# The only variable table is stringtable, it is not Composite item, should not reach here
#
assert(False)
@@ -631,7 +631,7 @@ def BuildExDataBase(Dict):
Dict['STRING_TABLE_DB_VALUE'] = [StringArrayToList(x) for x in Dict['STRING_TABLE_VALUE']]
StringTableValue = Dict['STRING_TABLE_DB_VALUE']
- # when calcute the offset, should use StringTableLen instead of StringTableValue, as string maxium len may be different with actual len
+ # when calcute the offset, should use StringTableLen instead of StringTableValue, as string maximum len may be different with actual len
StringTableLen = Dict['STRING_TABLE_LENGTH']
DbStringTableLen = DbStringItemList(0, RawDataList = StringTableValue, LenList = StringTableLen)
@@ -1011,7 +1011,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
'SKUID_TABLE_EMPTY' : 'TRUE',
'DATABASE_EMPTY' : 'TRUE',
'EXMAP_TABLE_EMPTY' : 'TRUE',
- 'PCD_DATABASE_UNINIT_EMPTY' : ' UINT8 dummy; /* PCD_DATABASE_UNINIT is emptry */',
+ 'PCD_DATABASE_UNINIT_EMPTY' : ' UINT8 dummy; /* PCD_DATABASE_UNINIT is empty */',
'SYSTEM_SKU_ID' : ' SKU_ID SystemSkuId;',
'SYSTEM_SKU_ID_VALUE' : '0U'
}
diff --git a/BaseTools/Source/Python/AutoGen/InfSectionParser.py b/BaseTools/Source/Python/AutoGen/InfSectionParser.py
index 09e9af3fb4..388b6780df 100644
--- a/BaseTools/Source/Python/AutoGen/InfSectionParser.py
+++ b/BaseTools/Source/Python/AutoGen/InfSectionParser.py
@@ -80,13 +80,13 @@ class InfSectionParser():
UserExtensionTianoCore.append({SubSection: SectionDataDict[key]})
return UserExtensionTianoCore
- # Get depex expresion
+ # Get depex expression
#
# @return: a list include some dictionary that key is section and value is a list contain all data.
def GetDepexExpresionList(self):
- DepexExpresionList = []
+ DepexExpressionList = []
if not self._FileSectionDataList:
- return DepexExpresionList
+ return DepexExpressionList
for SectionDataDict in self._FileSectionDataList:
for key in SectionDataDict:
if key.lower() == "[depex]" or key.lower().startswith("[depex."):
@@ -105,8 +105,8 @@ class InfSectionParser():
SubKey = (SectionList[1], SectionList[2])
else:
EdkLogger.error("build", AUTOGEN_ERROR, 'Section %s is invalid.' % key)
- DepexExpresionList.append({SubKey: SectionDataDict[key]})
- return DepexExpresionList
+ DepexExpressionList.append({SubKey: SectionDataDict[key]})
+ return DepexExpressionList
diff --git a/BaseTools/Source/Python/AutoGen/UniClassObject.py b/BaseTools/Source/Python/AutoGen/UniClassObject.py
index d162387cc5..5674adf3b8 100644
--- a/BaseTools/Source/Python/AutoGen/UniClassObject.py
+++ b/BaseTools/Source/Python/AutoGen/UniClassObject.py
@@ -99,12 +99,12 @@ LangConvTable = {'eng':'en', 'fra':'fr', \
## GetLanguageCode
#
# Check the language code read from .UNI file and convert ISO 639-2 codes to RFC 4646 codes if appropriate
-# ISO 639-2 language codes supported in compatiblity mode
+# ISO 639-2 language codes supported in compatibility mode
# RFC 4646 language codes supported in native mode
#
# @param LangName: Language codes read from .UNI file
#
-# @retval LangName: Valid lanugage code in RFC 4646 format or None
+# @retval LangName: Valid language code in RFC 4646 format or None
#
def GetLanguageCode(LangName, IsCompatibleMode, File):
length = len(LangName)
diff --git a/BaseTools/Source/Python/Common/BuildToolError.py b/BaseTools/Source/Python/Common/BuildToolError.py
index fc2503ea80..09582b932a 100644
--- a/BaseTools/Source/Python/Common/BuildToolError.py
+++ b/BaseTools/Source/Python/Common/BuildToolError.py
@@ -1,5 +1,5 @@
## @file
-# Standardized Error Hanlding infrastructures.
+# Standardized Error Handling infrastructures.
#
# Copyright (c) 2007 - 2016, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
diff --git a/BaseTools/Source/Python/Common/EdkLogger.py b/BaseTools/Source/Python/Common/EdkLogger.py
index af7707482c..77c0d2a28e 100644
--- a/BaseTools/Source/Python/Common/EdkLogger.py
+++ b/BaseTools/Source/Python/Common/EdkLogger.py
@@ -145,7 +145,7 @@ def warn(ToolName, Message, File=None, Line=None, ExtraData=None):
_InfoLogger.log(WARN, LogText)
- # Raise an execption if indicated
+ # Raise an exception if indicated
if _WarningAsError == True:
raise FatalError(WARNING_AS_ERROR)
@@ -155,7 +155,7 @@ info = _InfoLogger.info
## Log ERROR message
#
# Once an error messages is logged, the tool's execution will be broken by raising
-# an execption. If you don't want to break the execution later, you can give
+# an exception. If you don't want to break the execution later, you can give
# "RaiseError" with "False" value.
#
# @param ToolName The name of the tool. If not given, the name of caller
@@ -165,7 +165,7 @@ info = _InfoLogger.info
# @param File The name of file which caused the error.
# @param Line The line number in the "File" which caused the warning.
# @param ExtraData More information associated with "Message"
-# @param RaiseError Raise an exception to break the tool's executuion if
+# @param RaiseError Raise an exception to break the tool's execution if
# it's True. This is the default behavior.
#
def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=None, RaiseError=IsRaiseError):
diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py
index 6b3c4f7937..565a5ad40b 100644
--- a/BaseTools/Source/Python/Common/Misc.py
+++ b/BaseTools/Source/Python/Common/Misc.py
@@ -775,10 +775,10 @@ class Progressor:
## Constructor
#
- # @param OpenMessage The string printed before progress charaters
- # @param CloseMessage The string printed after progress charaters
- # @param ProgressChar The charater used to indicate the progress
- # @param Interval The interval in seconds between two progress charaters
+ # @param OpenMessage The string printed before progress characters
+ # @param CloseMessage The string printed after progress characters
+ # @param ProgressChar The character used to indicate the progress
+ # @param Interval The interval in seconds between two progress characters
#
def __init__(self, OpenMessage="", CloseMessage="", ProgressChar='.', Interval=1.0):
self.PromptMessage = OpenMessage
@@ -788,9 +788,9 @@ class Progressor:
if Progressor._StopFlag is None:
Progressor._StopFlag = threading.Event()
- ## Start to print progress charater
+ ## Start to print progress character
#
- # @param OpenMessage The string printed before progress charaters
+ # @param OpenMessage The string printed before progress characters
#
def Start(self, OpenMessage=None):
if OpenMessage is not None:
@@ -801,9 +801,9 @@ class Progressor:
Progressor._ProgressThread.setDaemon(False)
Progressor._ProgressThread.start()
- ## Stop printing progress charater
+ ## Stop printing progress character
#
- # @param CloseMessage The string printed after progress charaters
+ # @param CloseMessage The string printed after progress characters
#
def Stop(self, CloseMessage=None):
OriginalCodaMessage = self.CodaMessage
@@ -1422,7 +1422,7 @@ class PathClass(object):
## Override __cmp__ function
#
- # Customize the comparsion operation of two PathClass
+ # Customize the comparison operation of two PathClass
#
# @retval 0 The two PathClass are different
# @retval -1 The first PathClass is less than the second PathClass
@@ -1517,7 +1517,7 @@ class PathClass(object):
self.Path = os.path.join(RealRoot, RealFile)
return ErrorCode, ErrorInfo
-## Parse PE image to get the required PE informaion.
+## Parse PE image to get the required PE information.
#
class PeImageClass():
## Constructor
diff --git a/BaseTools/Source/Python/Common/MultipleWorkspace.py b/BaseTools/Source/Python/Common/MultipleWorkspace.py
index 81594e22cd..8ad90aefaf 100644
--- a/BaseTools/Source/Python/Common/MultipleWorkspace.py
+++ b/BaseTools/Source/Python/Common/MultipleWorkspace.py
@@ -24,7 +24,7 @@ from Common.DataType import TAB_WORKSPACE
# @param class:
#
# @var WORKSPACE: defined the current WORKSPACE
-# @var PACKAGES_PATH: defined the other WORKSAPCE, if current WORKSPACE is invalid, search valid WORKSPACE from PACKAGES_PATH
+# @var PACKAGES_PATH: defined the other WORKSPACE, if current WORKSPACE is invalid, search valid WORKSPACE from PACKAGES_PATH
#
class MultipleWorkspace(object):
WORKSPACE = ''
@@ -146,7 +146,7 @@ class MultipleWorkspace(object):
## getPkgPath()
#
- # get all package pathes.
+ # get all package paths.
#
# @param cls The class pointer
#
diff --git a/BaseTools/Source/Python/Common/Parsing.py b/BaseTools/Source/Python/Common/Parsing.py
index 889251b699..d858501af6 100644
--- a/BaseTools/Source/Python/Common/Parsing.py
+++ b/BaseTools/Source/Python/Common/Parsing.py
@@ -36,7 +36,7 @@ def ParseDefineMacro2(Table, RecordSets, GlobalMacro):
Macros[Record[0]] = Record[1]
#
- # Overrided by Global Macros
+ # Overridden by Global Macros
#
Macros.update(GlobalMacro)
@@ -76,7 +76,7 @@ def ParseDefineMacro(Table, GlobalMacro):
Macros[Record[0]] = Record[1]
#
- # Overrided by Global Macros
+ # Overridden by Global Macros
#
Macros.update(GlobalMacro)
@@ -830,7 +830,7 @@ def InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdLi
# @param Table: The Table to be inserted
# @param FileID: The ID of belonging file
# @param Filename: The name of belonging file
-# @param CurrentSection: The name of currect section
+# @param CurrentSection: The name of current section
# @param SectionItemList: A list of items of the section
# @param ArchList: A list of arches
# @param ThirdList: A list of third parameters, ModuleType for LibraryClass and SkuId for Dynamic Pcds
diff --git a/BaseTools/Source/Python/Common/RangeExpression.py b/BaseTools/Source/Python/Common/RangeExpression.py
index 40958451d2..1c52e83792 100644
--- a/BaseTools/Source/Python/Common/RangeExpression.py
+++ b/BaseTools/Source/Python/Common/RangeExpression.py
@@ -290,7 +290,7 @@ class RangeExpression(BaseExpression):
return rangeid
- def NegtiveRange(self, Oprand1):
+ def NegativeRange(self, Oprand1):
rangeContainer1 = self.operanddict[Oprand1]
@@ -332,7 +332,7 @@ class RangeExpression(BaseExpression):
if Operator in ["!", "NOT", "not"]:
if not gGuidPattern.match(Oprand1.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
- return self.NegtiveRange(Oprand1)
+ return self.NegativeRange(Oprand1)
else:
if Operator in ["==", ">=", "<=", ">", "<", '^']:
return self.EvalRange(Operator, Oprand1)
diff --git a/BaseTools/Source/Python/Common/StringUtils.py b/BaseTools/Source/Python/Common/StringUtils.py
index c6227271a4..a2e8f0f9e7 100644
--- a/BaseTools/Source/Python/Common/StringUtils.py
+++ b/BaseTools/Source/Python/Common/StringUtils.py
@@ -32,7 +32,7 @@ gHumanReadableVerPatt = re.compile(r'([1-9][0-9]*|0)\.[0-9]{1,2}$')
## GetSplitValueList
#
-# Get a value list from a string with multiple values splited with SplitTag
+# Get a value list from a string with multiple values split with SplitTag
# The default SplitTag is DataType.TAB_VALUE_SPLIT
# 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
#
@@ -88,7 +88,7 @@ def GetSplitValueList(String, SplitTag=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
## GetSplitList
#
-# Get a value list from a string with multiple values splited with SplitString
+# Get a value list from a string with multiple values split with SplitString
# The default SplitTag is DataType.TAB_VALUE_SPLIT
# 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
#
@@ -123,7 +123,7 @@ def MergeArches(Dict, Key, Arch):
# Return False if invalid format
#
# @param String: String with DEFINE statement
-# @param Arch: Supportted Arch
+# @param Arch: Supported Arch
# @param Defines: DEFINE statement to be parsed
#
# @retval 0 DEFINE statement found, and valid
@@ -149,7 +149,7 @@ def GenDefines(String, Arch, Defines):
#
# @param String: String with INCLUDE statement
# @param IncludeFiles: INCLUDE statement to be parsed
-# @param Arch: Supportted Arch
+# @param Arch: Supported Arch
#
# @retval True
# @retval False
@@ -297,7 +297,7 @@ def ReplaceMacro(String, MacroDefinitions={}, SelfReplacement=False, RaiseError=
## NormPath
#
# Create a normal path
-# And replace DFEINE in the path
+# And replace DEFINE in the path
#
# @param Path: The input value for Path to be converted
# @param Defines: A set for DEFINE statement
@@ -730,9 +730,9 @@ def WorkspaceFile(WorkspaceDir, Filename):
## Split string
#
-# Revmove '"' which startswith and endswith string
+# Remove '"' which startswith and endswith string
#
-# @param String: The string need to be splited
+# @param String: The string need to be split
#
# @retval String: The string after removed '""'
#
diff --git a/BaseTools/Source/Python/CommonDataClass/CommonClass.py b/BaseTools/Source/Python/CommonDataClass/CommonClass.py
index 336bb11671..33f306c842 100644
--- a/BaseTools/Source/Python/CommonDataClass/CommonClass.py
+++ b/BaseTools/Source/Python/CommonDataClass/CommonClass.py
@@ -66,7 +66,7 @@ class SkuInfoClass(object):
## Convert the class to a string
#
# Convert each member of the class to string
- # Organize to a signle line format string
+ # Organize to a single line format string
#
# @retval Rtn Formatted String
#
diff --git a/BaseTools/Source/Python/CommonDataClass/DataClass.py b/BaseTools/Source/Python/CommonDataClass/DataClass.py
index 31ed46c7ec..2d93f79b09 100644
--- a/BaseTools/Source/Python/CommonDataClass/DataClass.py
+++ b/BaseTools/Source/Python/CommonDataClass/DataClass.py
@@ -194,7 +194,7 @@ MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
# @param Header: Header of a Function
# @param Modifier: Modifier of a Function
# @param Name: Name of a Function
-# @param ReturnStatement: ReturnStatement of a Funciont
+# @param ReturnStatement: ReturnStatement of a Function
# @param StartLine: StartLine of a Function
# @param StartColumn: StartColumn of a Function
# @param EndLine: EndLine of a Function
@@ -209,7 +209,7 @@ MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
# @var Header: Header of a Function
# @var Modifier: Modifier of a Function
# @var Name: Name of a Function
-# @var ReturnStatement: ReturnStatement of a Funciont
+# @var ReturnStatement: ReturnStatement of a Function
# @var StartLine: StartLine of a Function
# @var StartColumn: StartColumn of a Function
# @var EndLine: EndLine of a Function
diff --git a/BaseTools/Source/Python/Ecc/C.g b/BaseTools/Source/Python/Ecc/C.g
index e344088f36..4affd61ecc 100644
--- a/BaseTools/Source/Python/Ecc/C.g
+++ b/BaseTools/Source/Python/Ecc/C.g
@@ -23,7 +23,7 @@ options {
## @file
# The file defines the Lexer for C source files.
#
-# THIS FILE IS AUTO-GENENERATED. PLEASE DON NOT MODIFY THIS FILE.
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
@@ -44,7 +44,7 @@ options {
## @file
# The file defines the parser for C source files.
#
-# THIS FILE IS AUTO-GENENERATED. PLEASE DON NOT MODIFY THIS FILE.
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
@@ -653,11 +653,11 @@ UnicodeEscape
WS : (' '|'\r'|'\t'|'\u000C'|'\n') {$channel=HIDDEN;}
;
-// ingore '\' of line concatenation
+// ignore '\' of line concatenation
BS : ('\\') {$channel=HIDDEN;}
;
-// ingore function modifiers
+// ignore function modifiers
//FUNC_MODIFIERS : 'EFIAPI' {$channel=HIDDEN;}
// ;
diff --git a/BaseTools/Source/Python/Ecc/CParser3/CLexer.py b/BaseTools/Source/Python/Ecc/CParser3/CLexer.py
index b9e57c1b55..17c1af6113 100644
--- a/BaseTools/Source/Python/Ecc/CParser3/CLexer.py
+++ b/BaseTools/Source/Python/Ecc/CParser3/CLexer.py
@@ -6,7 +6,7 @@ from antlr3.compat import set, frozenset
## @file
# The file defines the Lexer for C source files.
#
-# THIS FILE IS AUTO-GENENERATED. PLEASE DON NOT MODIFY THIS FILE.
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
diff --git a/BaseTools/Source/Python/Ecc/CParser3/CParser.py b/BaseTools/Source/Python/Ecc/CParser3/CParser.py
index a3ab8e7bf4..c63b0a556f 100644
--- a/BaseTools/Source/Python/Ecc/CParser3/CParser.py
+++ b/BaseTools/Source/Python/Ecc/CParser3/CParser.py
@@ -8,7 +8,7 @@ from antlr3.compat import set, frozenset
## @file
# The file defines the parser for C source files.
#
-# THIS FILE IS AUTO-GENENERATED. PLEASE DON NOT MODIFY THIS FILE.
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
diff --git a/BaseTools/Source/Python/Ecc/Check.py b/BaseTools/Source/Python/Ecc/Check.py
index a6c62359d0..6e5bcee846 100644
--- a/BaseTools/Source/Python/Ecc/Check.py
+++ b/BaseTools/Source/Python/Ecc/Check.py
@@ -1403,7 +1403,7 @@ class Check(object):
# Check whether only capital letters are used for #define declarations
def NamingConventionCheckDefineStatement(self, FileTable):
if EccGlobalData.gConfig.NamingConventionCheckDefineStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
- EdkLogger.quiet("Checking naming covention of #define statement ...")
+ EdkLogger.quiet("Checking naming convention of #define statement ...")
SqlCommand = """select ID, Value from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_MACRO_DEFINE)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
@@ -1418,7 +1418,7 @@ class Check(object):
# Check whether only capital letters are used for typedef declarations
def NamingConventionCheckTypedefStatement(self, FileTable):
if EccGlobalData.gConfig.NamingConventionCheckTypedefStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
- EdkLogger.quiet("Checking naming covention of #typedef statement ...")
+ EdkLogger.quiet("Checking naming convention of #typedef statement ...")
SqlCommand = """select ID, Name from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_TYPEDEF)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
@@ -1438,7 +1438,7 @@ class Check(object):
# Check whether the #ifndef at the start of an include file uses both prefix and postfix underscore characters, '_'.
def NamingConventionCheckIfndefStatement(self, FileTable):
if EccGlobalData.gConfig.NamingConventionCheckIfndefStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
- EdkLogger.quiet("Checking naming covention of #ifndef statement ...")
+ EdkLogger.quiet("Checking naming convention of #ifndef statement ...")
SqlCommand = """select ID, Value from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_MACRO_IFNDEF)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
@@ -1455,7 +1455,7 @@ class Check(object):
# Check whether the path name followed the rule
def NamingConventionCheckPathName(self):
if EccGlobalData.gConfig.NamingConventionCheckPathName == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
- EdkLogger.quiet("Checking naming covention of file path name ...")
+ EdkLogger.quiet("Checking naming convention of file path name ...")
Pattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
SqlCommand = """select ID, Name from File"""
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
@@ -1472,7 +1472,7 @@ class Check(object):
# Check whether the variable name followed the rule
def NamingConventionCheckVariableName(self, FileTable):
if EccGlobalData.gConfig.NamingConventionCheckVariableName == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
- EdkLogger.quiet("Checking naming covention of variable name ...")
+ EdkLogger.quiet("Checking naming convention of variable name ...")
Pattern = re.compile(r'^[A-Zgm]+\S*[a-z]\S*$')
SqlCommand = """select ID, Name from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_VARIABLE)
@@ -1492,7 +1492,7 @@ class Check(object):
# Check whether the function name followed the rule
def NamingConventionCheckFunctionName(self):
if EccGlobalData.gConfig.NamingConventionCheckFunctionName == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
- EdkLogger.quiet("Checking naming covention of function name ...")
+ EdkLogger.quiet("Checking naming convention of function name ...")
Pattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
SqlCommand = """select ID, Name from Function"""
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
@@ -1504,7 +1504,7 @@ class Check(object):
# Check whether NO use short variable name with single character
def NamingConventionCheckSingleCharacterVariable(self, FileTable):
if EccGlobalData.gConfig.NamingConventionCheckSingleCharacterVariable == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
- EdkLogger.quiet("Checking naming covention of single character variable name ...")
+ EdkLogger.quiet("Checking naming convention of single character variable name ...")
SqlCommand = """select ID, Name from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_VARIABLE)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
diff --git a/BaseTools/Source/Python/Ecc/Database.py b/BaseTools/Source/Python/Ecc/Database.py
index 9d4acac7b7..2a6b1198d8 100644
--- a/BaseTools/Source/Python/Ecc/Database.py
+++ b/BaseTools/Source/Python/Ecc/Database.py
@@ -39,7 +39,7 @@ DATABASE_PATH = "Ecc.db"
## Database
#
-# This class defined the ECC databse
+# This class defined the ECC database
# During the phase of initialization, the database will create all tables and
# insert all records of table DataModel
#
@@ -83,7 +83,7 @@ class Database(object):
self.Conn = sqlite3.connect(self.DbPath, isolation_level = 'DEFERRED')
self.Conn.execute("PRAGMA page_size=4096")
self.Conn.execute("PRAGMA synchronous=OFF")
- # to avoid non-ascii charater conversion error
+ # to avoid non-ascii character conversion error
self.Conn.text_factory = str
self.Cur = self.Conn.cursor()
@@ -214,7 +214,7 @@ class Database(object):
## UpdateIdentifierBelongsToFunction
#
- # Update the field "BelongsToFunction" for each Indentifier
+ # Update the field "BelongsToFunction" for each Identifier
#
#
def UpdateIdentifierBelongsToFunction_disabled(self):
@@ -267,7 +267,7 @@ class Database(object):
## UpdateIdentifierBelongsToFunction
#
- # Update the field "BelongsToFunction" for each Indentifier
+ # Update the field "BelongsToFunction" for each Identifier
#
#
def UpdateIdentifierBelongsToFunction(self):
diff --git a/BaseTools/Source/Python/Ecc/EccMain.py b/BaseTools/Source/Python/Ecc/EccMain.py
index edb6c6d7d4..f06183d646 100644
--- a/BaseTools/Source/Python/Ecc/EccMain.py
+++ b/BaseTools/Source/Python/Ecc/EccMain.py
@@ -209,7 +209,7 @@ class Ecc(object):
Op.write("%s\r" % Filename)
#Dsc(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
self.MetaFile = DscParser(PathClass(Filename, Root), MODEL_FILE_DSC, MetaFileStorage(EccGlobalData.gDb.TblDsc.Cur, Filename, MODEL_FILE_DSC, True))
- # alwasy do post-process, in case of macros change
+ # always do post-process, in case of macros change
self.MetaFile.DoPostProcess()
self.MetaFile.Start()
self.MetaFile._PostProcess()
@@ -307,7 +307,7 @@ class Ecc(object):
if Options.Workspace:
os.environ["WORKSPACE"] = Options.Workspace
- # Check workspace envirnoment
+ # Check workspace environment
if "WORKSPACE" not in os.environ:
EdkLogger.error("ECC", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
ExtraData="WORKSPACE")
diff --git a/BaseTools/Source/Python/Ecc/EccToolError.py b/BaseTools/Source/Python/Ecc/EccToolError.py
index ae0a31af8a..3f1a44c1f7 100644
--- a/BaseTools/Source/Python/Ecc/EccToolError.py
+++ b/BaseTools/Source/Python/Ecc/EccToolError.py
@@ -1,5 +1,5 @@
## @file
-# Standardized Error Hanlding infrastructures.
+# Standardized Error Handling infrastructures.
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
@@ -152,7 +152,7 @@ gEccErrorMessage = {
ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_2 : "The #ifndef must be the first line of code following the file header comment",
ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_3 : "The #endif must appear on the last line in the file",
ERROR_INCLUDE_FILE_CHECK_DATA : "Include files should contain only public or only private data and cannot contain code or define data variables",
- ERROR_INCLUDE_FILE_CHECK_NAME : "No permission for the inlcude file with same names",
+ ERROR_INCLUDE_FILE_CHECK_NAME : "No permission for the include file with same names",
ERROR_DECLARATION_DATA_TYPE_CHECK_ALL : "",
ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE : "There should be no use of int, unsigned, char, void, static, long in any .c, .h or .asl files",
diff --git a/BaseTools/Source/Python/Ecc/MetaDataParser.py b/BaseTools/Source/Python/Ecc/MetaDataParser.py
index 4594716886..aab2e92ea7 100644
--- a/BaseTools/Source/Python/Ecc/MetaDataParser.py
+++ b/BaseTools/Source/Python/Ecc/MetaDataParser.py
@@ -18,7 +18,7 @@ from Ecc.EccToolError import *
from Common.MultipleWorkspace import MultipleWorkspace as mws
from Ecc import EccGlobalData
import re
-## Get the inlcude path list for a source file
+## Get the include path list for a source file
#
# 1. Find the source file belongs to which inf file
# 2. Find the inf's package
diff --git a/BaseTools/Source/Python/Ecc/c.py b/BaseTools/Source/Python/Ecc/c.py
index 0c377c6c4a..7b645ff053 100644
--- a/BaseTools/Source/Python/Ecc/c.py
+++ b/BaseTools/Source/Python/Ecc/c.py
@@ -733,7 +733,7 @@ def SplitPredicateByOp(Str, Op, IsFuncCalling=False):
while not LBFound and (Str[Index].isalnum() or Str[Index] == '_'):
Index += 1
- # maybe type-cast at the begining, skip it.
+ # maybe type-cast at the beginning, skip it.
RemainingStr = Str[Index:].lstrip()
if RemainingStr.startswith(')') and not LBFound:
Index += 1
@@ -834,7 +834,7 @@ def GetDataTypeFromModifier(ModifierStr):
for M in MList:
if M in EccGlobalData.gConfig.ModifierSet:
continue
- # remove array sufix
+ # remove array suffix
if M.startswith('[') or M.endswith(']'):
continue
ReturnType += M + ' '
@@ -1019,7 +1019,7 @@ def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
Type = GetDataTypeFromModifier(Field[0:Index])
return Type.strip()
else:
- # For the condition that the field in struct is an array with [] sufixes...
+ # For the condition that the field in struct is an array with [] suffixes...
if not Field[Index + len(FieldName)].isalnum():
Type = GetDataTypeFromModifier(Field[0:Index])
return Type.strip()
@@ -1629,7 +1629,7 @@ def CheckMemberVariableFormat(Name, Value, FileTable, TdId, ModelId):
Field = Field.strip()
if Field == '':
continue
- # For the condition that the field in struct is an array with [] sufixes...
+ # For the condition that the field in struct is an array with [] suffixes...
if Field[-1] == ']':
LBPos = Field.find('[')
Field = Field[0:LBPos]
diff --git a/BaseTools/Source/Python/Eot/CParser3/CLexer.py b/BaseTools/Source/Python/Eot/CParser3/CLexer.py
index b9e57c1b55..17c1af6113 100644
--- a/BaseTools/Source/Python/Eot/CParser3/CLexer.py
+++ b/BaseTools/Source/Python/Eot/CParser3/CLexer.py
@@ -6,7 +6,7 @@ from antlr3.compat import set, frozenset
## @file
# The file defines the Lexer for C source files.
#
-# THIS FILE IS AUTO-GENENERATED. PLEASE DON NOT MODIFY THIS FILE.
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
diff --git a/BaseTools/Source/Python/Eot/CParser3/CParser.py b/BaseTools/Source/Python/Eot/CParser3/CParser.py
index 0b74b53ae7..973e7a82b7 100644
--- a/BaseTools/Source/Python/Eot/CParser3/CParser.py
+++ b/BaseTools/Source/Python/Eot/CParser3/CParser.py
@@ -8,7 +8,7 @@ from antlr3.compat import set, frozenset
## @file
# The file defines the parser for C source files.
#
-# THIS FILE IS AUTO-GENENERATED. PLEASE DON NOT MODIFY THIS FILE.
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
diff --git a/BaseTools/Source/Python/Eot/CodeFragmentCollector.py b/BaseTools/Source/Python/Eot/CodeFragmentCollector.py
index b1e77a690a..ec1faa665e 100644
--- a/BaseTools/Source/Python/Eot/CodeFragmentCollector.py
+++ b/BaseTools/Source/Python/Eot/CodeFragmentCollector.py
@@ -390,10 +390,10 @@ class CodeFragmentCollector:
print('################# ' + self.FileName + '#####################')
print('/****************************************/')
- print('/*************** ASSIGNMENTS ***************/')
+ print('/************** ASSIGNMENTS *************/')
print('/****************************************/')
- for asign in FileProfile.AssignmentExpressionList:
- print(str(asign.StartPos) + asign.Name + asign.Operator + asign.Value)
+ for assign in FileProfile.AssignmentExpressionList:
+ print(str(assign.StartPos) + assign.Name + assign.Operator + assign.Value)
print('/****************************************/')
print('/********* PREPROCESS DIRECTIVES ********/')
diff --git a/BaseTools/Source/Python/Eot/Database.py b/BaseTools/Source/Python/Eot/Database.py
index 65bac9c127..f6191161f2 100644
--- a/BaseTools/Source/Python/Eot/Database.py
+++ b/BaseTools/Source/Python/Eot/Database.py
@@ -38,7 +38,7 @@ DATABASE_PATH = "Eot.db"
## Database class
#
-# This class defined the EOT databse
+# This class defined the EOT database
# During the phase of initialization, the database will create all tables and
# insert all records of table DataModel
#
@@ -83,7 +83,7 @@ class Database(object):
self.Conn = sqlite3.connect(self.DbPath, isolation_level = 'DEFERRED')
self.Conn.execute("PRAGMA page_size=8192")
self.Conn.execute("PRAGMA synchronous=OFF")
- # to avoid non-ascii charater conversion error
+ # to avoid non-ascii character conversion error
self.Conn.text_factory = str
self.Cur = self.Conn.cursor()
@@ -198,7 +198,7 @@ class Database(object):
## UpdateIdentifierBelongsToFunction() method
#
- # Update the field "BelongsToFunction" for each Indentifier
+ # Update the field "BelongsToFunction" for each Identifier
#
# @param self: The object pointer
#
diff --git a/BaseTools/Source/Python/Eot/EotGlobalData.py b/BaseTools/Source/Python/Eot/EotGlobalData.py
index cb6a940ab8..729b9f7c77 100644
--- a/BaseTools/Source/Python/Eot/EotGlobalData.py
+++ b/BaseTools/Source/Python/Eot/EotGlobalData.py
@@ -92,7 +92,7 @@ gConsumedProtocolLibrary['EfiLocateProtocolHandleBuffers'] = 0
gConsumedProtocolLibrary['EfiLocateProtocolInterface'] = 0
gConsumedProtocolLibrary['EfiHandleProtocol'] = 1
-# Dict for callback PROTOCOL function callling
+# Dict for callback PROTOCOL function calling
gCallbackProtocolLibrary = OrderedDict()
gCallbackProtocolLibrary['EfiRegisterProtocolCallback'] = 2
diff --git a/BaseTools/Source/Python/Eot/EotMain.py b/BaseTools/Source/Python/Eot/EotMain.py
index fd4bee6f90..e4359c5015 100644
--- a/BaseTools/Source/Python/Eot/EotMain.py
+++ b/BaseTools/Source/Python/Eot/EotMain.py
@@ -1514,7 +1514,7 @@ class Eot(object):
% (Identifier, '.NotifyPpi', '->NotifyPpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode)
- # Find Procotols
+ # Find Protocols
ItemMode = 'Produced'
SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
where (Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
diff --git a/BaseTools/Source/Python/Eot/InfParserLite.py b/BaseTools/Source/Python/Eot/InfParserLite.py
index 0cfe0398f0..cec083330b 100644
--- a/BaseTools/Source/Python/Eot/InfParserLite.py
+++ b/BaseTools/Source/Python/Eot/InfParserLite.py
@@ -41,8 +41,8 @@ class EdkInfParser(object):
# @param Database: Eot database
# @param SourceFileList: A list for all source file belonging this INF file
# @param SourceOverridePath: Override path for source file
- # @param Edk_Source: Envirnoment variable EDK_SOURCE
- # @param Efi_Source: Envirnoment variable EFI_SOURCE
+ # @param Edk_Source: Environment variable EDK_SOURCE
+ # @param Efi_Source: Environment variable EFI_SOURCE
#
def __init__(self, Filename = None, Database = None, SourceFileList = None, SourceOverridePath = None, Edk_Source = None, Efi_Source = None):
self.Identification = Identification()
diff --git a/BaseTools/Source/Python/Eot/Parser.py b/BaseTools/Source/Python/Eot/Parser.py
index 673088d41d..6b47409c9d 100644
--- a/BaseTools/Source/Python/Eot/Parser.py
+++ b/BaseTools/Source/Python/Eot/Parser.py
@@ -62,7 +62,7 @@ def DeCompress(Method, Input):
# @param MergeMultipleLines: Switch for if merge multiple lines
# @param LineNo: Default line no
#
-# @return Lines: The file contents after remvoing comments
+# @return Lines: The file contents after removing comments
#
def PreProcess(Filename, MergeMultipleLines = True, LineNo = -1):
Lines = []
@@ -770,7 +770,7 @@ def GetParameterName(Parameter):
# @param Table: Table to be searched
# @param Key: The keyword
#
-# @return Value: The value of the the keyword
+# @return Value: The value of the keyword
#
def FindKeyValue(Db, Table, Key):
SqlCommand = """select Value from %s where Name = '%s' and (Model = %s or Model = %s)""" % (Table, Key, MODEL_IDENTIFIER_VARIABLE, MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION)
diff --git a/BaseTools/Source/Python/Eot/Report.py b/BaseTools/Source/Python/Eot/Report.py
index 01ad86ad3e..3e71c12393 100644
--- a/BaseTools/Source/Python/Eot/Report.py
+++ b/BaseTools/Source/Python/Eot/Report.py
@@ -332,7 +332,7 @@ class Report(object):
Content = """ </table></td>
</tr>"""
self.WriteLn(Content)
- #End of Consumed Ppi/Portocol
+ #End of Consumed Ppi/Protocol
# Find Produced Ppi/Protocol
SqlCommand = """select ModuleName, ItemType, GuidName, GuidValue, GuidMacro from Report
diff --git a/BaseTools/Source/Python/GenFds/FdfParser.py b/BaseTools/Source/Python/GenFds/FdfParser.py
index 63edf816ec..56294ed0d9 100644
--- a/BaseTools/Source/Python/GenFds/FdfParser.py
+++ b/BaseTools/Source/Python/GenFds/FdfParser.py
@@ -716,13 +716,13 @@ class FdfParser:
EndPos = CurLine.find(')', StartPos+2)
while StartPos != -1 and EndPos != -1 and self._Token not in {TAB_IF_DEF, TAB_IF_N_DEF, TAB_IF, TAB_ELSE_IF}:
MacroName = CurLine[StartPos+2: EndPos]
- MacorValue = self._GetMacroValue(MacroName)
- if MacorValue is not None:
- CurLine = CurLine.replace('$(' + MacroName + ')', MacorValue, 1)
- if MacorValue.find('$(') != -1:
+ MacroValue = self._GetMacroValue(MacroName)
+ if MacroValue is not None:
+ CurLine = CurLine.replace('$(' + MacroName + ')', MacroValue, 1)
+ if MacroValue.find('$(') != -1:
PreIndex = StartPos
else:
- PreIndex = StartPos + len(MacorValue)
+ PreIndex = StartPos + len(MacroValue)
else:
PreIndex = EndPos + 1
StartPos = CurLine.find('$(', PreIndex)
@@ -1037,7 +1037,7 @@ class FdfParser:
## _GetNextToken() method
#
- # Get next token unit before a seperator
+ # Get next token unit before a separator
# If found, the string value is put into self._Token
#
# @param self The object pointer
@@ -1054,12 +1054,12 @@ class FdfParser:
StartLine = self.CurrentLineNumber
while StartLine == self.CurrentLineNumber:
TempChar = self._CurrentChar()
- # Try to find the end char that is not a space and not in seperator tuple.
+ # Try to find the end char that is not a space and not in separator tuple.
# That is, when we got a space or any char in the tuple, we got the end of token.
if not str(TempChar).isspace() and TempChar not in SEPARATORS:
self._GetOneChar()
- # if we happen to meet a seperator as the first char, we must proceed to get it.
- # That is, we get a token that is a seperator char. nomally it is the boundary of other tokens.
+ # if we happen to meet a separator as the first char, we must proceed to get it.
+ # That is, we get a token that is a separator char. normally it is the boundary of other tokens.
elif StartPos == self.CurrentOffsetWithinLine and TempChar in SEPARATORS:
self._GetOneChar()
break
@@ -1081,7 +1081,7 @@ class FdfParser:
## _GetNextGuid() method
#
- # Get next token unit before a seperator
+ # Get next token unit before a separator
# If found, the GUID string is put into self._Token
#
# @param self The object pointer
@@ -1133,13 +1133,13 @@ class FdfParser:
while CurrentLine == self.CurrentLineNumber:
TempChar = self._CurrentChar()
- # Try to find the end char that is not a space and not in seperator tuple.
+ # Try to find the end char that is not a space and not in separator tuple.
# That is, when we got a space or any char in the tuple, we got the end of token.
if not str(TempChar).isspace() and not TempChar in SEPARATORS:
if not self._UndoOneChar():
return
- # if we happen to meet a seperator as the first char, we must proceed to get it.
- # That is, we get a token that is a seperator char. nomally it is the boundary of other tokens.
+ # if we happen to meet a separator as the first char, we must proceed to get it.
+ # That is, we get a token that is a separator char. normally it is the boundary of other tokens.
elif StartPos == self.CurrentOffsetWithinLine and TempChar in SEPARATORS:
return
else:
@@ -1149,7 +1149,7 @@ class FdfParser:
## _GetNextHexNumber() method
#
- # Get next HEX data before a seperator
+ # Get next HEX data before a separator
# If found, the HEX data is put into self._Token
#
# @param self The object pointer
@@ -1167,7 +1167,7 @@ class FdfParser:
## _GetNextDecimalNumber() method
#
- # Get next decimal data before a seperator
+ # Get next decimal data before a separator
# If found, the decimal data is put into self._Token
#
# @param self The object pointer
diff --git a/BaseTools/Source/Python/GenFds/FfsInfStatement.py b/BaseTools/Source/Python/GenFds/FfsInfStatement.py
index 6dcb57deed..84506ed70e 100644
--- a/BaseTools/Source/Python/GenFds/FfsInfStatement.py
+++ b/BaseTools/Source/Python/GenFds/FfsInfStatement.py
@@ -190,7 +190,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClassObj, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
#
- # Set Ffs BaseName, MdouleGuid, ModuleType, Version, OutputPath
+ # Set Ffs BaseName, ModuleGuid, ModuleType, Version, OutputPath
#
self.BaseName = Inf.BaseName
self.ModuleGuid = Inf.Guid
@@ -352,7 +352,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
GenFdsGlobalVariable.VerboseLogger("InfFileName :%s" % self.InfFileName)
#
- # Set OutputPath = ${WorkSpace}\Build\Fv\Ffs\${ModuleGuid}+ ${MdouleName}\
+ # Set OutputPath = ${WorkSpace}\Build\Fv\Ffs\${ModuleGuid}+ ${ModuleName}\
#
self.OutputPath = os.path.join(GenFdsGlobalVariable.FfsDir, \
diff --git a/BaseTools/Source/Python/GenFds/Fv.py b/BaseTools/Source/Python/GenFds/Fv.py
index 2ae991128a..06d853b607 100644
--- a/BaseTools/Source/Python/GenFds/Fv.py
+++ b/BaseTools/Source/Python/GenFds/Fv.py
@@ -262,7 +262,7 @@ class FV (object):
## _InitializeInf()
#
- # Initilize the inf file to create FV
+ # Initialize the inf file to create FV
#
# @param self The object pointer
# @param BaseAddress base address of FV
diff --git a/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py b/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py
index 6cea885853..37c7e91e83 100644
--- a/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py
+++ b/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py
@@ -42,7 +42,7 @@ __usage__ = '%s -e|-d [options] <input_file>' % (__prog__)
EFI_HASH_ALGORITHM_SHA256_GUID = uuid.UUID('{51aa59de-fdf2-4ea3-bc63-875fb7842ee9}')
#
-# Structure defintion to unpack EFI_CERT_BLOCK_RSA_2048_SHA256 from UEFI 2.4 Specification
+# Structure definition to unpack EFI_CERT_BLOCK_RSA_2048_SHA256 from UEFI 2.4 Specification
#
# typedef struct _EFI_CERT_BLOCK_RSA_2048_SHA256 {
# EFI_GUID HashType;
diff --git a/BaseTools/Source/Python/Table/Table.py b/BaseTools/Source/Python/Table/Table.py
index e89b99320d..d8ffc72dab 100644
--- a/BaseTools/Source/Python/Table/Table.py
+++ b/BaseTools/Source/Python/Table/Table.py
@@ -52,7 +52,7 @@ class Table(object):
# Query all records of the table
#
def Query(self):
- EdkLogger.verbose("\nQuery tabel %s started ..." % self.Table)
+ EdkLogger.verbose("\nQuery table %s started ..." % self.Table)
SqlCommand = """select * from %s""" % self.Table
self.Cur.execute(SqlCommand)
for Rs in self.Cur:
diff --git a/BaseTools/Source/Python/Table/TableFunction.py b/BaseTools/Source/Python/Table/TableFunction.py
index ee677cf107..9d43ff0947 100644
--- a/BaseTools/Source/Python/Table/TableFunction.py
+++ b/BaseTools/Source/Python/Table/TableFunction.py
@@ -38,7 +38,7 @@ class TableFunction(Table):
# @param Header: Header of a Function
# @param Modifier: Modifier of a Function
# @param Name: Name of a Function
- # @param ReturnStatement: ReturnStatement of a Funciont
+ # @param ReturnStatement: ReturnStatement of a Function
# @param StartLine: StartLine of a Function
# @param StartColumn: StartColumn of a Function
# @param EndLine: EndLine of a Function
@@ -75,7 +75,7 @@ class TableFunction(Table):
# @param Header: Header of a Function
# @param Modifier: Modifier of a Function
# @param Name: Name of a Function
- # @param ReturnStatement: ReturnStatement of a Funciont
+ # @param ReturnStatement: ReturnStatement of a Function
# @param StartLine: StartLine of a Function
# @param StartColumn: StartColumn of a Function
# @param EndLine: EndLine of a Function
diff --git a/BaseTools/Source/Python/Table/TableQuery.py b/BaseTools/Source/Python/Table/TableQuery.py
index 8e7d313c71..b203bf88e8 100644
--- a/BaseTools/Source/Python/Table/TableQuery.py
+++ b/BaseTools/Source/Python/Table/TableQuery.py
@@ -37,7 +37,7 @@ class TableQuery(Table):
#
# @param ID: ID of a Query
# @param Name: Name of a Query
- # @param Modifer: Modifier of a Query
+ # @param Modifier: Modifier of a Query
# @param Value: Type of a Query
# @param Model: Model of a Query
#
diff --git a/BaseTools/Source/Python/TargetTool/TargetTool.py b/BaseTools/Source/Python/TargetTool/TargetTool.py
index a49a01f9e3..5ff54160aa 100644
--- a/BaseTools/Source/Python/TargetTool/TargetTool.py
+++ b/BaseTools/Source/Python/TargetTool/TargetTool.py
@@ -143,14 +143,14 @@ def GetConfigureKeyValue(self, Key):
if os.path.exists(dscFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.DSCFILE)
else:
- EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
+ EdkLogger.error("TargetTool", BuildToolError.FILE_NOT_FOUND,
"DSC file %s does not exist!" % self.Opt.DSCFILE, RaiseError=False)
elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.Opt.TOOL_DEFINITION_FILE is not None:
tooldefFullPath = os.path.join(self.WorkSpace, self.Opt.TOOL_DEFINITION_FILE)
if os.path.exists(tooldefFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_DEFINITION_FILE)
else:
- EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
+ EdkLogger.error("TargetTool", BuildToolError.FILE_NOT_FOUND,
"Tooldef file %s does not exist!" % self.Opt.TOOL_DEFINITION_FILE, RaiseError=False)
elif self.Opt.NUM >= 2:
diff --git a/BaseTools/Source/Python/Trim/Trim.py b/BaseTools/Source/Python/Trim/Trim.py
index 428bf0d681..825ed3e5d5 100644
--- a/BaseTools/Source/Python/Trim/Trim.py
+++ b/BaseTools/Source/Python/Trim/Trim.py
@@ -168,7 +168,7 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
InjectedFile = MatchList[1]
InjectedFile = os.path.normpath(InjectedFile)
InjectedFile = os.path.normcase(InjectedFile)
- # The first injetcted file must be the preprocessed file itself
+ # The first injected file must be the preprocessed file itself
if PreprocessedFile == "":
PreprocessedFile = InjectedFile
LineControlDirectiveFound = True
diff --git a/BaseTools/Source/Python/UPT/Core/DependencyRules.py b/BaseTools/Source/Python/UPT/Core/DependencyRules.py
index 9c3baa1b12..89b81f8b1f 100644
--- a/BaseTools/Source/Python/UPT/Core/DependencyRules.py
+++ b/BaseTools/Source/Python/UPT/Core/DependencyRules.py
@@ -159,7 +159,7 @@ class DependencyRules(object):
#
# @param PkgObj: A package object
# @param DpObj: A distribution object
- # @return: True if package depex satisified
+ # @return: True if package depex satisfied
# False else
#
def CheckPackageDepexSatisfied(self, PkgObj, DpObj=None):
diff --git a/BaseTools/Source/Python/UPT/Core/IpiDb.py b/BaseTools/Source/Python/UPT/Core/IpiDb.py
index 48defeac7e..bedf26321e 100644
--- a/BaseTools/Source/Python/UPT/Core/IpiDb.py
+++ b/BaseTools/Source/Python/UPT/Core/IpiDb.py
@@ -735,8 +735,8 @@ class IpiDatabase(object):
## Get a list of module information that comes from DP.
#
- # @param DpGuid: A Distrabution Guid
- # @param DpVersion: A Distrabution version
+ # @param DpGuid: A Distribution Guid
+ # @param DpVersion: A Distribution version
#
def GetSModInsPathListFromDp(self, DpGuid, DpVersion):
@@ -754,8 +754,8 @@ class IpiDatabase(object):
## Get a list of package information.
#
- # @param DpGuid: A Distrabution Guid
- # @param DpVersion: A Distrabution version
+ # @param DpGuid: A Distribution Guid
+ # @param DpVersion: A Distribution version
#
def GetPackageListFromDp(self, DpGuid, DpVersion):
@@ -774,8 +774,8 @@ class IpiDatabase(object):
## Get a list of modules that depends on package information from a DP.
#
- # @param DpGuid: A Distrabution Guid
- # @param DpVersion: A Distrabution version
+ # @param DpGuid: A Distribution Guid
+ # @param DpVersion: A Distribution version
#
def GetDpDependentModuleList(self, DpGuid, DpVersion):
@@ -831,8 +831,8 @@ class IpiDatabase(object):
## Get Dp's list of modules.
#
- # @param DpGuid: A Distrabution Guid
- # @param DpVersion: A Distrabution version
+ # @param DpGuid: A Distribution Guid
+ # @param DpVersion: A Distribution version
#
def GetDpModuleList(self, DpGuid, DpVersion):
ModList = []
diff --git a/BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py b/BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py
index e2dd6a02dc..63c6da0fec 100644
--- a/BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py
+++ b/BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py
@@ -646,8 +646,8 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
File = codecs.open(ContainerFile, 'w', Encoding)
File.write(u'\uFEFF' + Content)
File.stream.close()
- Md5Sigature = md5(__FileHookOpen__(str(ContainerFile), 'rb').read())
- Md5Sum = Md5Sigature.hexdigest()
+ Md5Signature = md5(__FileHookOpen__(str(ContainerFile), 'rb').read())
+ Md5Sum = Md5Signature.hexdigest()
if (ContainerFile, Md5Sum) not in PackageObject.FileList:
PackageObject.FileList.append((ContainerFile, Md5Sum))
diff --git a/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py b/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
index 1f8b3f163e..40346b0b3a 100644
--- a/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
+++ b/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
@@ -255,8 +255,8 @@ def GenModuleUNIEncodeFile(ModuleObject, UniFileHeader='', Encoding=DT.TAB_ENCOD
File = codecs.open(ContainerFile, 'wb', Encoding)
File.write(u'\uFEFF' + Content)
File.stream.close()
- Md5Sigature = md5(__FileHookOpen__(str(ContainerFile), 'rb').read())
- Md5Sum = Md5Sigature.hexdigest()
+ Md5Signature = md5(__FileHookOpen__(str(ContainerFile), 'rb').read())
+ Md5Sum = Md5Signature.hexdigest()
if (ContainerFile, Md5Sum) not in ModuleObject.FileList:
ModuleObject.FileList.append((ContainerFile, Md5Sum))
diff --git a/BaseTools/Source/Python/UPT/InstallPkg.py b/BaseTools/Source/Python/UPT/InstallPkg.py
index c553d707fd..cbc54f9407 100644
--- a/BaseTools/Source/Python/UPT/InstallPkg.py
+++ b/BaseTools/Source/Python/UPT/InstallPkg.py
@@ -176,8 +176,8 @@ def UnZipDp(WorkspaceDir, DpPkgFileName, Index=1):
# verify MD5 signature when existed
#
if DistPkg.Header.Signature != '':
- Md5Sigature = md5(__FileHookOpen__(ContentFile, 'rb').read())
- if DistPkg.Header.Signature != Md5Sigature.hexdigest():
+ Md5Signature = md5(__FileHookOpen__(ContentFile, 'rb').read())
+ if DistPkg.Header.Signature != Md5Signature.hexdigest():
ContentZipFile.Close()
Logger.Error("InstallPkg", FILE_CHECKSUM_FAILURE,
ExtraData=ContentFile)
@@ -215,8 +215,8 @@ def GetPackageList(DistPkg, Dep, WorkspaceDir, Options, ContentZipFile, ModuleLi
#
for Package in PackageList:
FilePath = PackageToDec(Package, DistPkg.Header)
- Md5Sigature = md5(__FileHookOpen__(str(FilePath), 'rb').read())
- Md5Sum = Md5Sigature.hexdigest()
+ Md5Signature = md5(__FileHookOpen__(str(FilePath), 'rb').read())
+ Md5Sum = Md5Signature.hexdigest()
if (FilePath, Md5Sum) not in Package.FileList:
Package.FileList.append((FilePath, Md5Sum))
@@ -275,8 +275,8 @@ def GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList):
for (Module, Package) in ModuleList:
CheckCNameInModuleRedefined(Module, DistPkg)
FilePath = ModuleToInf(Module, Package, DistPkg.Header)
- Md5Sigature = md5(__FileHookOpen__(str(FilePath), 'rb').read())
- Md5Sum = Md5Sigature.hexdigest()
+ Md5Signature = md5(__FileHookOpen__(str(FilePath), 'rb').read())
+ Md5Sum = Md5Signature.hexdigest()
if Package:
if (FilePath, Md5Sum) not in Package.FileList:
Package.FileList.append((FilePath, Md5Sum))
@@ -803,8 +803,8 @@ def InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable=False):
else:
chmod(ToFile, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
- Md5Sigature = md5(__FileHookOpen__(str(ToFile), 'rb').read())
- Md5Sum = Md5Sigature.hexdigest()
+ Md5Signature = md5(__FileHookOpen__(str(ToFile), 'rb').read())
+ Md5Sum = Md5Signature.hexdigest()
return Md5Sum
@@ -876,8 +876,8 @@ def InstallPackageContent(FromPath, ToPath, Package, ContentZipFile, Dep,
chmod(ToFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
else:
chmod(ToFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH|stat.S_IWUSR|stat.S_IWGRP|stat.S_IWOTH)
- Md5Sigature = md5(__FileHookOpen__(str(ToFile), 'rb').read())
- Md5Sum = Md5Sigature.hexdigest()
+ Md5Signature = md5(__FileHookOpen__(str(ToFile), 'rb').read())
+ Md5Sum = Md5Signature.hexdigest()
if (ToFile, Md5Sum) not in Package.FileList:
Package.FileList.append((ToFile, Md5Sum))
Package.SetIncludeArchList(PackageIncludeArchList)
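The Md5Sigature -> Md5Signature rename above touches a checksum pattern that recurs throughout UPT. A minimal standalone sketch of that pattern, using Python's hashlib and a plain open() instead of the tool's __FileHookOpen__ wrapper (the helper name below is illustrative, not part of the patch):

    from hashlib import md5

    def compute_file_md5(path):
        # Read the file in binary mode and return its MD5 hex digest,
        # mirroring the Md5Signature/Md5Sum usage in the hunks above.
        with open(path, 'rb') as handle:
            md5_signature = md5(handle.read())
        return md5_signature.hexdigest()

    # e.g. compare against a recorded checksum and report FILE_CHECKSUM_FAILURE
    # on mismatch, as InstallPkg.py does for the distribution content file.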
diff --git a/BaseTools/Source/Python/UPT/Library/CommentParsing.py b/BaseTools/Source/Python/UPT/Library/CommentParsing.py
index a09a530ffb..250ba2dd5e 100644
--- a/BaseTools/Source/Python/UPT/Library/CommentParsing.py
+++ b/BaseTools/Source/Python/UPT/Library/CommentParsing.py
@@ -426,7 +426,7 @@ def _CheckListExpression(Expression):
return IsValidListExpr(ListExpr)
-## _CheckExpreesion
+## _CheckExpression
#
# @param Expression: Pcd value expression
#
@@ -476,11 +476,11 @@ def _ValidateCopyright(Line):
def GenerateTokenList (Comment):
#
- # Tokenize Comment using '#' and ' ' as token seperators
+ # Tokenize Comment using '#' and ' ' as token separators
#
- RelplacedComment = None
- while Comment != RelplacedComment:
- RelplacedComment = Comment
+ ReplacedComment = None
+ while Comment != ReplacedComment:
+ ReplacedComment = Comment
Comment = Comment.replace('##', '#').replace('  ', ' ').replace(' ', '#').strip('# ')
return Comment.split('#')
@@ -538,13 +538,13 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
NumTokens = 1
#
- # Initialze HelpText to Comment.
+ # Initialize HelpText to Comment.
# Content will be remove from HelpText as matching tokens are found
#
HelpText = Comment
#
- # Tokenize Comment using '#' and ' ' as token seperators
+ # Tokenize Comment using '#' and ' ' as token separators
#
List = GenerateTokenList (Comment)
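The GenerateTokenList change above is part of a fixed-point normalization loop; rewritten as a self-contained sketch (function and variable names here are illustrative):

    def generate_token_list(comment):
        # Collapse '##' and double spaces, turn remaining spaces into '#',
        # and strip leading/trailing '# ' until the string stops changing,
        # then split on '#' -- the same loop shown in the hunk above.
        replaced_comment = None
        while comment != replaced_comment:
            replaced_comment = comment
            comment = comment.replace('##', '#').replace('  ', ' ').replace(' ', '#').strip('# ')
        return comment.split('#')

    # generate_token_list('## PRODUCES  ## Comment text') == ['PRODUCES', 'Comment', 'text']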
diff --git a/BaseTools/Source/Python/UPT/Library/DataType.py b/BaseTools/Source/Python/UPT/Library/DataType.py
index 97ca835882..07673694b2 100644
--- a/BaseTools/Source/Python/UPT/Library/DataType.py
+++ b/BaseTools/Source/Python/UPT/Library/DataType.py
@@ -93,7 +93,7 @@ TAB_STR_TOKENHELP = 'HELP'
TAB_STR_TOKENERR = 'ERR'
#
-# Dictionary of usage tokens and their synonmys
+# Dictionary of usage tokens and their synonyms
#
ALL_USAGE_TOKENS = {
"PRODUCES" : "PRODUCES",
diff --git a/BaseTools/Source/Python/UPT/Library/GlobalData.py b/BaseTools/Source/Python/UPT/Library/GlobalData.py
index 40b17cf083..6b093873dd 100644
--- a/BaseTools/Source/Python/UPT/Library/GlobalData.py
+++ b/BaseTools/Source/Python/UPT/Library/GlobalData.py
@@ -43,7 +43,7 @@ gINVALID_MODULE_FILE = gUPT_DIR + r"Invalid_Modules.log"
gCONTENT_FILE = "dist.content"
#
-# File name for XML file in the distibution
+# File name for XML file in the distribution
#
gDESC_FILE = 'dist.pkg'
diff --git a/BaseTools/Source/Python/UPT/Library/Misc.py b/BaseTools/Source/Python/UPT/Library/Misc.py
index d69b161420..e7ee27cc53 100644
--- a/BaseTools/Source/Python/UPT/Library/Misc.py
+++ b/BaseTools/Source/Python/UPT/Library/Misc.py
@@ -788,7 +788,7 @@ def GetLibInstanceInfo(String, WorkSpace, LineNo):
FileGuidString = ""
VerString = ""
- OrignalString = String
+ OriginalString = String
String = String.strip()
if not String:
return None, None
@@ -808,7 +808,7 @@ def GetLibInstanceInfo(String, WorkSpace, LineNo):
ST.ERR_FILELIST_EXIST % (String),
File=GlobalData.gINF_MODULE_NAME,
Line=LineNo,
- ExtraData=OrignalString)
+ ExtraData=OriginalString)
#
# Validate file exist/format.
@@ -821,7 +821,7 @@ def GetLibInstanceInfo(String, WorkSpace, LineNo):
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID % (String),
File=GlobalData.gINF_MODULE_NAME,
Line=LineNo,
- ExtraData=OrignalString)
+ ExtraData=OriginalString)
return False
if IsValidFileFlag:
FileLinesList = []
@@ -973,7 +973,7 @@ def ValidateUNIFilePath(Path):
ExtraData=Path)
#
- # Check if '..' in the file name(without suffixe)
+ # Check if '..' in the file name(without suffix)
#
if (TAB_SPLIT + TAB_SPLIT) in Path:
Logger.Error("Unicode File Parser",
diff --git a/BaseTools/Source/Python/UPT/Library/ParserValidate.py b/BaseTools/Source/Python/UPT/Library/ParserValidate.py
index 87d156fa4c..1408aec242 100644
--- a/BaseTools/Source/Python/UPT/Library/ParserValidate.py
+++ b/BaseTools/Source/Python/UPT/Library/ParserValidate.py
@@ -13,7 +13,7 @@
#
'''
-PaserValidate
+ParserValidate
'''
import os.path
@@ -68,7 +68,7 @@ def IsValidHex(HexStr):
#
def IsValidBoolType(BoolString):
#
- # Valid Ture
+ # Valid True
#
if BoolString == 'TRUE' or \
BoolString == 'True' or \
@@ -124,11 +124,11 @@ def IsValidInfComponentType(ComponentType):
## Is valid Tool Family or not
#
# @param ToolFamily: A string contain Tool Family need to be judged.
-# Famlily := [A-Z]([a-zA-Z0-9])*
+# Family := [A-Z]([a-zA-Z0-9])*
#
def IsValidToolFamily(ToolFamily):
- ReIsValieFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
- if ReIsValieFamily.match(ToolFamily) is None:
+ ReIsValidFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
+ if ReIsValidFamily.match(ToolFamily) is None:
return False
return True
@@ -158,8 +158,8 @@ def IsValidToolTagName(TagName):
def IsValidArch(Arch):
if Arch == 'common':
return True
- ReIsValieArch = re.compile(r"^[a-zA-Z]+[a-zA-Z0-9]{0,}$", re.DOTALL)
- if ReIsValieArch.match(Arch) is None:
+ ReIsValidArch = re.compile(r"^[a-zA-Z]+[a-zA-Z0-9]{0,}$", re.DOTALL)
+ if ReIsValidArch.match(Arch) is None:
return False
return True
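For reference, the two renamed regular-expression checks above validate tool family and architecture names; extracted into a self-contained sketch (the snake_case wrappers are illustrative):

    import re

    # A tool family must start with an uppercase letter, an arch with any letter;
    # both may be followed by alphanumeric characters, as in the hunks above.
    _VALID_FAMILY = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
    _VALID_ARCH = re.compile(r"^[a-zA-Z]+[a-zA-Z0-9]{0,}$", re.DOTALL)

    def is_valid_tool_family(tool_family):
        return _VALID_FAMILY.match(tool_family) is not None

    def is_valid_arch(arch):
        # 'common' is accepted unconditionally, mirroring IsValidArch above.
        return arch == 'common' or _VALID_ARCH.match(arch) is not None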
diff --git a/BaseTools/Source/Python/UPT/Library/Parsing.py b/BaseTools/Source/Python/UPT/Library/Parsing.py
index 3eca8e3849..f7e995b93e 100644
--- a/BaseTools/Source/Python/UPT/Library/Parsing.py
+++ b/BaseTools/Source/Python/UPT/Library/Parsing.py
@@ -959,7 +959,7 @@ def MacroParser(Line, FileName, SectionType, FileLocalMacros):
# INF, DEC specs
# @param SectionDict: section statement dict, key is SectionAttrs(arch,
# moduletype or platform may exist as needed) list
-# seperated by space,
+# separated by space,
# value is statement
#
def GenSection(SectionName, SectionDict, SplitArch=True, NeedBlankLine=False):
diff --git a/BaseTools/Source/Python/UPT/Library/StringUtils.py b/BaseTools/Source/Python/UPT/Library/StringUtils.py
index a3391daa91..6fa1ee89d4 100644
--- a/BaseTools/Source/Python/UPT/Library/StringUtils.py
+++ b/BaseTools/Source/Python/UPT/Library/StringUtils.py
@@ -33,7 +33,7 @@ gMACRO_PATTERN = re.compile("\$\(([_A-Z][_A-Z0-9]*)\)", re.UNICODE)
## GetSplitValueList
#
-# Get a value list from a string with multiple values splited with SplitTag
+# Get a value list from a string with multiple values split with SplitTag
# The default SplitTag is DataType.TAB_VALUE_SPLIT
# 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
#
@@ -67,7 +67,7 @@ def MergeArches(Dict, Key, Arch):
# Return False if invalid format
#
# @param String: String with DEFINE statement
-# @param Arch: Supportted Arch
+# @param Arch: Supported Arch
# @param Defines: DEFINE statement to be parsed
#
def GenDefines(String, Arch, Defines):
@@ -236,7 +236,7 @@ def ReplaceMacro(String, MacroDefinitions=None, SelfReplacement=False, Line=None
## NormPath
#
# Create a normal path
-# And replace DFEINE in the path
+# And replace DEFINE in the path
#
# @param Path: The input value for Path to be converted
# @param Defines: A set for DEFINE statement
@@ -613,9 +613,9 @@ def WorkspaceFile(WorkspaceDir, Filename):
## Split string
#
-# Revmove '"' which startswith and endswith string
+# Remove '"' which startswith and endswith string
#
-# @param String: The string need to be splited
+# @param String: The string need to be split
#
def SplitString(String):
if String.startswith('\"'):
@@ -734,7 +734,7 @@ def IsHexDigit(Str):
return False
return False
-## Check if the string is HexDgit and its interger value within limit of UINT32
+## Check if the string is HexDigit and its integer value within limit of UINT32
#
# Return true if all characters in the string are digits and there is at
# least one character
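The GetSplitValueList comment fixed near the top of this file documents a simple splitting helper; a simplified sketch of only the documented example is shown below (the real helper accepts additional arguments and handles more cases, which this sketch assumes away):

    def get_split_value_list(value, split_tag='|'):
        # 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC'], per the doc comment above.
        return [part.strip() for part in value.split(split_tag)]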
diff --git a/BaseTools/Source/Python/UPT/Library/UniClassObject.py b/BaseTools/Source/Python/UPT/Library/UniClassObject.py
index bd7804b753..d575c6b1f8 100644
--- a/BaseTools/Source/Python/UPT/Library/UniClassObject.py
+++ b/BaseTools/Source/Python/UPT/Library/UniClassObject.py
@@ -130,12 +130,12 @@ def ConvertSpecialUnicodes(Uni):
## GetLanguageCode1766
#
# Check the language code read from .UNI file and convert RFC 4646 codes to RFC 1766 codes
-# RFC 1766 language codes supported in compatiblity mode
+# RFC 1766 language codes supported in compatibility mode
# RFC 4646 language codes supported in native mode
#
# @param LangName: Language codes read from .UNI file
#
-# @retval LangName: Valid lanugage code in RFC 1766 format or None
+# @retval LangName: Valid language code in RFC 1766 format or None
#
def GetLanguageCode1766(LangName, File=None):
return LangName
@@ -177,7 +177,7 @@ def GetLanguageCode1766(LangName, File=None):
## GetLanguageCode
#
# Check the language code read from .UNI file and convert RFC 1766 codes to RFC 4646 codes if appropriate
-# RFC 1766 language codes supported in compatiblity mode
+# RFC 1766 language codes supported in compatibility mode
# RFC 4646 language codes supported in native mode
#
# @param LangName: Language codes read from .UNI file
@@ -221,13 +221,13 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
## FormatUniEntry
#
-# Formated the entry in Uni file.
+# Formatted the entry in Uni file.
#
# @param StrTokenName StrTokenName.
# @param TokenValueList A list need to be processed.
# @param ContainerFile ContainerFile.
#
-# @return formated entry
+# @return formatted entry
def FormatUniEntry(StrTokenName, TokenValueList, ContainerFile):
SubContent = ''
PreFormatLength = 40
@@ -478,8 +478,8 @@ class UniFileClassObject(object):
MultiLineFeedExits = False
#
# 0: initial value
- # 1: signle String entry exist
- # 2: line feed exist under the some signle String entry
+ # 1: single String entry exist
+ # 2: line feed exist under the some single String entry
#
StringEntryExistsFlag = 0
for Line in FileIn:
@@ -497,7 +497,7 @@ class UniFileClassObject(object):
StringEntryExistsFlag = 2
#
# If the '#string' line and the '#language' line are not in the same line,
- # there should be only one line feed character betwwen them
+ # there should be only one line feed character between them
#
if MultiLineFeedExits:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
@@ -505,7 +505,7 @@ class UniFileClassObject(object):
MultiLineFeedExits = False
#
- # Process comment embeded in string define lines
+ # Process comment embedded in string define lines
#
FindFlag = Line.find(u'//')
if FindFlag != -1 and Line.find(u'//') < Line.find(u'"'):
@@ -771,7 +771,7 @@ class UniFileClassObject(object):
#
# Check Abstract, Description, BinaryAbstract and BinaryDescription order,
- # should be Abstract, Description, BinaryAbstract, BinaryDesctiption
+ # should be Abstract, Description, BinaryAbstract, BinaryDescription
AbstractPosition = -1
DescriptionPosition = -1
BinaryAbstractPosition = -1
diff --git a/BaseTools/Source/Python/UPT/Logger/Log.py b/BaseTools/Source/Python/UPT/Logger/Log.py
index e8c31f0072..7671202b30 100644
--- a/BaseTools/Source/Python/UPT/Logger/Log.py
+++ b/BaseTools/Source/Python/UPT/Logger/Log.py
@@ -190,7 +190,7 @@ def Warn(ToolName, Message, File=None, Line=None, ExtraData=None):
_INFO_LOGGER.log(WARN, LogText)
#
- # Raise an execption if indicated
+ # Raise an exception if indicated
#
if GlobalData.gWARNING_AS_ERROR == True:
raise FatalError(WARNING_AS_ERROR)
@@ -198,7 +198,7 @@ def Warn(ToolName, Message, File=None, Line=None, ExtraData=None):
## Log ERROR message
#
# Once an error messages is logged, the tool's execution will be broken by
-# raising an execption. If you don't want to break the execution later, you
+# raising an exception. If you don't want to break the execution later, you
# can give "RaiseError" with "False" value.
#
# @param ToolName The name of the tool. If not given, the name of caller
@@ -208,7 +208,7 @@ def Warn(ToolName, Message, File=None, Line=None, ExtraData=None):
# @param File The name of file which caused the error.
# @param Line The line number in the "File" which caused the warning.
# @param ExtraData More information associated with "Message"
-# @param RaiseError Raise an exception to break the tool's executuion if
+# @param RaiseError Raise an exception to break the tool's execution if
# it's True. This is the default behavior.
#
def Error(ToolName, ErrorCode, Message=None, File=None, Line=None, \
diff --git a/BaseTools/Source/Python/UPT/Logger/StringTable.py b/BaseTools/Source/Python/UPT/Logger/StringTable.py
index 061943925a..d815d9cf89 100644
--- a/BaseTools/Source/Python/UPT/Logger/StringTable.py
+++ b/BaseTools/Source/Python/UPT/Logger/StringTable.py
@@ -135,7 +135,7 @@ ERR_INF_PARSER_LIBRARY_SECTION_CONTENT_ERROR = \
ERR_INF_PARSER_LIBRARY_SECTION_LIBNAME_MISSING = \
_("Format invalid. Please specify a library name.")
ERR_INF_PARSER_SOURCES_SECTION_CONTENT_ERROR = \
- _("The format is incorrect. It should be formated as follows: "
+ _("The format is incorrect. It should be formatted as follows: "
"FileName, Family | TagName | ToolCode | FeatureFlagExpr.")
ERR_INF_PARSER_PCD_SECTION_TYPE_ERROR = \
_("The PCD section type is incorrect. The value should be this list: %s")
@@ -147,10 +147,10 @@ ERR_INF_PARSER_PCD_NAME_FORMAT_ERROR = \
"Should like following: <TokenSpaceGuidCName>.<PcdCName> ")
ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR = \
_("The format is incorrect. "
- "It should be formated as follows: CName | FeatureFlag.")
+ "It should be formatted as follows: CName | FeatureFlag.")
ERR_INF_PARSER_PACKAGE_SECTION_CONTENT_ERROR = \
_("The format is incorrect. "
- "It should be formated as follows: <TokenSpaceGuidCName>.<PcdCName>")
+ "It should be formatted as follows: <TokenSpaceGuidCName>.<PcdCName>")
ERR_INF_PARSER_PCD_TAIL_COMMENTS_INVALID = \
_("The format is incorrect. "
"Multiple usage descriptions must be described on subsequent lines.")
@@ -197,7 +197,7 @@ ERR_INF_PARSER_FILE_MISS_DEFINE = \
_("The following file listed in the module "
"directory is not listed in the INF: %s")
ERR_INF_PARSER_VERSION_NUMBER_DEPRICATED = \
- _("VERSION_NUMBER depricated. "
+ _("VERSION_NUMBER deprecated. "
"The INF file %s should be modified to use the VERSION_STRING instead.")
ERR_INF_PARSER_VER_EXIST_BOTH_NUM_STR = \
_("The INF file %s defines both VERSION_NUMBER and VERSION_STRING, "
@@ -585,7 +585,7 @@ _("Only a distribution file name without a path is allowed for "
"the distribution to be replaced during replace. Current given: '%s'.")
ERR_UNIPARSE_DBLQUOTE_UNMATCHED = \
_("Only Language entry can contain a couple of matched quote in one line")
-ERR_UNIPARSE_NO_SECTION_EXIST = _("No PakcageDef or ModuleDef section exists in the UNI file.")
+ERR_UNIPARSE_NO_SECTION_EXIST = _("No PackageDef or ModuleDef section exists in the UNI file.")
ERR_UNIPARSE_STRNAME_FORMAT_ERROR = _("The String Token Name %s must start with \"STR_\"")
ERR_UNIPARSE_SEP_LANGENTRY_LINE = _("Each <LangEntry> should be in a separate line :%s.")
ERR_UNIPARSE_MULTI_ENTRY_EXIST = \
@@ -724,7 +724,7 @@ _("Incorrect GUID value format, must be <GuidValueInCFormat:"
ERR_DECPARSE_CGUID_NOT_FOUND = _("Unable to find the GUID value of this GUID CName : '%s'.")
ERR_DECPARSE_FILEOPEN = _("Unable to open: [%s].")
ERR_DECPARSE_SECTION_EMPTY = _("Empty sections are not allowed.")
-ERR_DECPARSE_SECTION_UE = _("Incorrect UserExtentions format. "
+ERR_DECPARSE_SECTION_UE = _("Incorrect UserExtensions format. "
"Must be UserExtenxions.UserId.IdString[.Arch]+.")
ERR_DECPARSE_SECTION_UE_USERID = _("Invalid UserId, must be underscore"
"or alphanumeric characters.")
diff --git a/BaseTools/Source/Python/UPT/Logger/ToolError.py b/BaseTools/Source/Python/UPT/Logger/ToolError.py
index 7f7df9c6d5..a074edca8a 100644
--- a/BaseTools/Source/Python/UPT/Logger/ToolError.py
+++ b/BaseTools/Source/Python/UPT/Logger/ToolError.py
@@ -1,5 +1,5 @@
## @file
-# Standardized Error Hanlding infrastructures.
+# Standardized Error Handling infrastructures.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
diff --git a/BaseTools/Source/Python/UPT/MkPkg.py b/BaseTools/Source/Python/UPT/MkPkg.py
index 053d115547..3dc18ac6eb 100644
--- a/BaseTools/Source/Python/UPT/MkPkg.py
+++ b/BaseTools/Source/Python/UPT/MkPkg.py
@@ -194,7 +194,7 @@ def Main(Options = None):
ContentFileClosed = True
#
- # Add Md5Sigature
+ # Add Md5Signature
#
DistPkg.Header.Signature = md5(open(str(ContentFile), 'rb').read()).hexdigest()
#
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/DecObject.py b/BaseTools/Source/Python/UPT/Object/Parser/DecObject.py
index 302d3050aa..fab4be351f 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/DecObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/DecObject.py
@@ -171,7 +171,7 @@ class _DecItemBaseObject(_DecComments):
## DecDefineObject
#
-# Class to hold define section infomation
+# Class to hold define section information
#
class DecDefineObject(_DecBaseObject):
def __init__(self, PkgFullName):
@@ -520,7 +520,7 @@ class DecGuidObject(DecGuidObjectBase):
## DecPpiObject
#
-# Class for PPI seciont
+# Class for PPI section
#
# @param DecGuidObjectBase: Dec Guid Object Base
#
diff --git a/BaseTools/Source/Python/UPT/Parser/DecParser.py b/BaseTools/Source/Python/UPT/Parser/DecParser.py
index f7eeb84127..92bd64b9d9 100644
--- a/BaseTools/Source/Python/UPT/Parser/DecParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/DecParser.py
@@ -705,7 +705,7 @@ class _DecGuid(_DecBase):
## _DecUserExtension
#
-# Parse user extention section
+# Parse user extension section
#
class _DecUserExtension(_DecBase):
def __init__(self, RawData):
@@ -1033,7 +1033,7 @@ class Dec(_DecBase, _DecComments):
SectionNames.append(SectionName)
#
# In DEC specification, all section headers have at most two part:
- # SectionName.Arch except UserExtention
+ # SectionName.Arch except UserExtension
#
if len(ItemList) > 2:
self._LoggerError(ST.ERR_DECPARSE_SECTION_SUBTOOMANY % Item)
diff --git a/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py b/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
index c314892adf..53b62621dc 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
@@ -46,7 +46,7 @@ def GetLibInstanceInfo(String, WorkSpace, LineNo, CurrentInfFileName):
FileGuidString = ""
VerString = ""
- OrignalString = String
+ OriginalString = String
String = String.strip()
if not String:
return None, None
@@ -78,7 +78,7 @@ def GetLibInstanceInfo(String, WorkSpace, LineNo, CurrentInfFileName):
#
# To deal with library instance specified by file name
#
- FileLinesList = GetFileLineContent(String, WorkSpace, LineNo, OrignalString)
+ FileLinesList = GetFileLineContent(String, WorkSpace, LineNo, OriginalString)
ReFindFileGuidPattern = re.compile("^\s*FILE_GUID\s*=.*$")
diff --git a/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py b/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py
index 5dc00affad..a7b59d958c 100644
--- a/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py
+++ b/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py
@@ -413,7 +413,7 @@ class DecPomAlignment(PackageObject):
#
# get a non-overlap set of include path, IncludePathList should be
- # sorted, and path should be end with path seperator '\'
+ # sorted, and path should be end with path separator '\'
#
NonOverLapList = []
for Path1 in IncludePathList:
@@ -424,7 +424,7 @@ class DecPomAlignment(PackageObject):
NonOverLapList.append(Path1)
#
# revert the list so the longest path shown first in list, also need
- # to remove the extra path seperator '\'
+ # to remove the extra path separator '\'
# as this list is used to search the supported Arch info
#
for IndexN in range (0, len(IncludePathList)):
diff --git a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
index 2e83c247ed..d01481727b 100644
--- a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
+++ b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
@@ -293,7 +293,7 @@ class InfPomAlignment(ModuleObject):
self.SetUserExtensionList(self.GetUserExtensionList() + [UserExtension])
#
# Get all meta-file header information
- # the record is list of items formated:
+ # the record is list of items formatted:
# [LineValue, Arch, StartLine, ID, Third]
#
InfHeaderObj = self.Parser.InfHeader
diff --git a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
index 239078d019..68f281b521 100644
--- a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
+++ b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
@@ -155,10 +155,10 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
## Generate the define statement that will be put into userextension
# Not support comments.
#
-# @param HeaderComment: the original header comment (# not remvoed)
+# @param HeaderComment: the original header comment (# not removed)
# @param Name: the definition keyword, should not be empty or none
# @param Value: the definition keyword value
-# @param TailComment: the original Tail comment (# not remvoed)
+# @param TailComment: the original Tail comment (# not removed)
#
# @return: the regenerated define statement
#
diff --git a/BaseTools/Source/Python/UPT/RmPkg.py b/BaseTools/Source/Python/UPT/RmPkg.py
index f1d141c736..3d97295362 100644
--- a/BaseTools/Source/Python/UPT/RmPkg.py
+++ b/BaseTools/Source/Python/UPT/RmPkg.py
@@ -242,8 +242,8 @@ def RemoveDist(Guid, Version, StoredDistFile, DataBase, WorkspaceDir, ForceRemov
#
# check whether modified by users
#
- Md5Sigature = md5(open(str(Path), 'rb').read())
- if Md5Sum != Md5Sigature.hexdigest():
+ Md5Signature = md5(open(str(Path), 'rb').read())
+ if Md5Sum != Md5Signature.hexdigest():
Logger.Info(ST.MSG_CONFIRM_REMOVE2 % Path)
Input = stdin.readline()
Input = Input.replace('\r', '').replace('\n', '')
diff --git a/BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py b/BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py
index afea4a438b..752a9ea446 100644
--- a/BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py
+++ b/BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py
@@ -61,9 +61,9 @@ class MacroParserTestCase(unittest.TestCase):
self.dec = _DecBase(FileContent('dummy', []))
def testCorrectMacro(self):
- self.dec._MacroParser('DEFINE MARCRO1 = test1')
- self.failIf('MARCRO1' not in self.dec._LocalMacro)
- self.assertEqual(self.dec._LocalMacro['MARCRO1'], 'test1')
+ self.dec._MacroParser('DEFINE MACRO1 = test1')
+ self.failIf('MACRO1' not in self.dec._LocalMacro)
+ self.assertEqual(self.dec._LocalMacro['MACRO1'], 'test1')
def testErrorMacro1(self):
# Raise fatal error, macro name must be upper case letter
diff --git a/BaseTools/Source/Python/Workspace/BuildClassObject.py b/BaseTools/Source/Python/Workspace/BuildClassObject.py
index cff77a71ae..1df042f41c 100644
--- a/BaseTools/Source/Python/Workspace/BuildClassObject.py
+++ b/BaseTools/Source/Python/Workspace/BuildClassObject.py
@@ -176,7 +176,7 @@ class PcdClassObject(object):
## Convert the class to a string
#
# Convert each member of the class to string
- # Organize to a signle line format string
+ # Organize to a single line format string
#
# @retval Rtn Formatted String
#
diff --git a/BaseTools/Source/Python/Workspace/DecBuildData.py b/BaseTools/Source/Python/Workspace/DecBuildData.py
index 36b39be5d5..149c057b70 100644
--- a/BaseTools/Source/Python/Workspace/DecBuildData.py
+++ b/BaseTools/Source/Python/Workspace/DecBuildData.py
@@ -121,7 +121,7 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve all information in [Defines] section
#
- # (Retriving all [Defines] information in one-shot is just to save time.)
+ # (Retrieving all [Defines] information in one-shot is just to save time.)
#
def _GetHeaderInfo(self):
RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch]
@@ -167,7 +167,7 @@ class DecBuildData(PackageBuildClassObject):
if self._Protocols is None:
#
# tdict is a special kind of dict, used for selecting correct
- # protocol defition for given ARCH
+ # protocol definition for given ARCH
#
ProtocolDict = tdict(True)
PrivateProtocolDict = tdict(True)
@@ -210,7 +210,7 @@ class DecBuildData(PackageBuildClassObject):
if self._Ppis is None:
#
# tdict is a special kind of dict, used for selecting correct
- # PPI defition for given ARCH
+ # PPI definition for given ARCH
#
PpiDict = tdict(True)
PrivatePpiDict = tdict(True)
@@ -253,7 +253,7 @@ class DecBuildData(PackageBuildClassObject):
if self._Guids is None:
#
# tdict is a special kind of dict, used for selecting correct
- # GUID defition for given ARCH
+ # GUID definition for given ARCH
#
GuidDict = tdict(True)
PrivateGuidDict = tdict(True)
diff --git a/BaseTools/Source/Python/Workspace/DscBuildData.py b/BaseTools/Source/Python/Workspace/DscBuildData.py
index f472fa177f..1fd1639ab6 100644
--- a/BaseTools/Source/Python/Workspace/DscBuildData.py
+++ b/BaseTools/Source/Python/Workspace/DscBuildData.py
@@ -343,7 +343,7 @@ class DscBuildData(PlatformBuildClassObject):
## Retrieve all information in [Defines] section
#
- # (Retriving all [Defines] information in one-shot is just to save time.)
+ # (Retrieving all [Defines] information in one-shot is just to save time.)
#
def _GetHeaderInfo(self):
RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch]
diff --git a/BaseTools/Source/Python/Workspace/InfBuildData.py b/BaseTools/Source/Python/Workspace/InfBuildData.py
index fc779a9d25..6148ab0d30 100644
--- a/BaseTools/Source/Python/Workspace/InfBuildData.py
+++ b/BaseTools/Source/Python/Workspace/InfBuildData.py
@@ -205,7 +205,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve all information in [Defines] section
#
- # (Retriving all [Defines] information in one-shot is just to save time.)
+ # (Retrieving all [Defines] information in one-shot is just to save time.)
#
@cached_class_function
def _GetHeaderInfo(self):
@@ -816,7 +816,7 @@ class InfBuildData(ModuleBuildClassObject):
RetVal[Arch, ModuleType] = TemporaryDictionary[Arch, ModuleType]
return RetVal
- ## Retrieve depedency expression
+ ## Retrieve dependency expression
@cached_property
def DepexExpression(self):
RetVal = tdict(False, 2)
diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py
index 311d0faf38..a3e3216221 100644
--- a/BaseTools/Source/Python/Workspace/MetaFileParser.py
+++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py
@@ -438,7 +438,7 @@ class MetaFileParser(object):
ScopeKey = tuple(ScopeKey)
#
# DecParser SectionType is a list, will contain more than one item only in Pcd Section
- # As Pcd section macro usage is not alllowed, so here it is safe
+ # As Pcd section macro usage is not allowed, so here it is safe
#
if isinstance(self, DecParser):
SectionDictKey = self._SectionType[0], ScopeKey
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceCommon.py b/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
index ee238e5143..b79280bc2e 100644
--- a/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
+++ b/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
@@ -247,7 +247,7 @@ def GetModuleLibInstances(Module, Platform, BuildDatabase, Arch, Target, Toolcha
SortedLibraryList.append(Item)
#
- # Build the list of constructor and destructir names
+ # Build the list of constructor and destructor names
# The DAG Topo sort produces the destructor order, so the list of constructors must generated in the reverse order
#
SortedLibraryList.reverse()
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
index a6a292d15c..921a229a1c 100644
--- a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
+++ b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
@@ -37,7 +37,7 @@ from Workspace.InfBuildData import InfBuildData
#
# @param DbPath Path of database file
# @param GlobalMacros Global macros used for replacement during file parsing
-# @prarm RenewDb=False Create new database file if it's already there
+# @param RenewDb=False Create new database file if it's already there
#
class WorkspaceDatabase(object):
@@ -82,7 +82,7 @@ class WorkspaceDatabase(object):
Arch = None
return (FilePath, Arch) in self._CACHE_
- # key = (FilePath, Arch=None, Target=None, Toochain=None)
+ # key = (FilePath, Arch=None, Target=None, Toolchain=None)
def __getitem__(self, Key):
FilePath = Key[0]
KeyLength = len(Key)
@@ -123,7 +123,7 @@ class WorkspaceDatabase(object):
Arch,
MetaFileStorage(self.WorkspaceDb, FilePath, FileType)
)
- # alwasy do post-process, in case of macros change
+ # always do post-process, in case of macros change
MetaFile.DoPostProcess()
# object the build is based on
BuildObject = self._GENERATOR_[FileType](
@@ -149,7 +149,7 @@ class WorkspaceDatabase(object):
#
# @param DbPath Path of database file
# @param GlobalMacros Global macros used for replacement during file parsing
- # @prarm RenewDb=False Create new database file if it's already there
+ # @param RenewDb=False Create new database file if it's already there
#
def __init__(self):
self.DB = dict()
diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py
index cdea312864..99e79d4dca 100644
--- a/BaseTools/Source/Python/build/build.py
+++ b/BaseTools/Source/Python/build/build.py
@@ -488,7 +488,7 @@ class BuildTask:
time.sleep(0.1)
except BaseException as X:
#
- # TRICK: hide the output of threads left runing, so that the user can
+ # TRICK: hide the output of threads left running, so that the user can
# catch the error message easily
#
EdkLogger.SetLevel(EdkLogger.ERROR)
@@ -613,7 +613,7 @@ class BuildTask:
self.CompleteFlag = True
except:
#
- # TRICK: hide the output of threads left runing, so that the user can
+ # TRICK: hide the output of threads left running, so that the user can
# catch the error message easily
#
if not BuildTask._ErrorFlag.isSet():
@@ -1380,7 +1380,7 @@ class Build():
LaunchCommand(["GenFw", "--address", str(BaseAddress), "-r", ModuleOutputImage], ModuleInfo.OutputDir)
LaunchCommand(["GenFw", "--address", str(BaseAddress), "-r", ModuleDebugImage], ModuleInfo.DebugDir)
#
- # Collect funtion address from Map file
+ # Collect function address from Map file
#
ImageMapTable = ModuleOutputImage.replace('.efi', '.map')
FunctionList = []
@@ -1433,7 +1433,7 @@ class Build():
#
MapBuffer.append('(IMAGE=%s)\n\n' % (ModuleDebugImage))
#
- # Add funtion address
+ # Add function address
#
for Function in FunctionList:
if AddrIsOffset: