Diffstat (limited to 'BaseTools/Source/Python')
-rw-r--r--  BaseTools/Source/Python/AutoGen/AutoGen.py | 334
-rw-r--r--  BaseTools/Source/Python/AutoGen/GenMake.py | 26
-rw-r--r--  BaseTools/Source/Python/AutoGen/InfSectionParser.py | 107
-rw-r--r--  BaseTools/Source/Python/AutoGen/StrGather.py | 8
-rw-r--r--  BaseTools/Source/Python/AutoGen/UniClassObject.py | 7
-rw-r--r--  BaseTools/Source/Python/Common/DataType.py | 2
-rw-r--r--  BaseTools/Source/Python/Common/GlobalData.py | 10
-rw-r--r--  BaseTools/Source/Python/Common/Misc.py | 124
-rw-r--r--  BaseTools/Source/Python/Common/TargetTxtClassObject.py | 34
-rw-r--r--  BaseTools/Source/Python/Common/ToolDefClassObject.py | 19
-rw-r--r--  BaseTools/Source/Python/CommonDataClass/DataClass.py | 3
-rw-r--r--  BaseTools/Source/Python/GenFds/CompressSection.py | 2
-rw-r--r--  BaseTools/Source/Python/GenFds/DepexSection.py | 21
-rw-r--r--  BaseTools/Source/Python/GenFds/EfiSection.py | 8
-rw-r--r--  BaseTools/Source/Python/GenFds/FdfParser.py | 16
-rw-r--r--  BaseTools/Source/Python/GenFds/FfsFileStatement.py | 2
-rw-r--r--  BaseTools/Source/Python/GenFds/FfsInfStatement.py | 41
-rw-r--r--  BaseTools/Source/Python/GenFds/GenFds.py | 36
-rw-r--r--  BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py | 18
-rw-r--r--  BaseTools/Source/Python/GenFds/GuidSection.py | 12
-rw-r--r--  BaseTools/Source/Python/GenFds/Section.py | 6
-rw-r--r--  BaseTools/Source/Python/TargetTool/TargetTool.py | 2
-rw-r--r--  BaseTools/Source/Python/Trim/Trim.py | 2
-rw-r--r--  BaseTools/Source/Python/UPT/Logger/StringTable.py | 2
-rw-r--r--  BaseTools/Source/Python/Workspace/MetaFileCommentParser.py | 23
-rw-r--r--  BaseTools/Source/Python/Workspace/MetaFileParser.py | 25
-rw-r--r--  BaseTools/Source/Python/Workspace/WorkspaceDatabase.py | 83
-rw-r--r--  BaseTools/Source/Python/build/build.py | 47
-rw-r--r--  BaseTools/Source/Python/sitecustomize.py | 4
29 files changed, 875 insertions(+), 149 deletions(-)
diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py
index 8c62ac8e72..8cd387072e 100644
--- a/BaseTools/Source/Python/AutoGen/AutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -40,15 +40,27 @@ import Common.VpdInfoFile as VpdInfoFile
from GenPcdDb import CreatePcdDatabaseCode
from Workspace.MetaFileCommentParser import UsageList
+import InfSectionParser
+
## Regular expression for splitting Dependency Expression string into tokens
gDepexTokenPattern = re.compile("(\(|\)|\w+| \S+\.inf)")
+#
+# Match name = variable
+#
+gEfiVarStoreNamePattern = re.compile("\s*name\s*=\s*(\w+)")
+#
+# The guid in an efivarstore statement uses the following format and must be correct:
+# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
+#
+gEfiVarStoreGuidPattern = re.compile("\s*guid\s*=\s*({.*?{.*?}\s*})")
+
## Mapping Makefile type
gMakeTypeMap = {"MSFT":"nmake", "GCC":"gmake"}
## Build rule configuration file
-gBuildRuleFile = 'Conf/build_rule.txt'
+gDefaultBuildRuleFile = 'Conf/build_rule.txt'
## Build rule default version
AutoGenReqBuildRuleVerNum = "0.1"
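For reference, a minimal sketch of how the two patterns above pull the variable name and GUID out of a preprocessed efivarstore statement; the VFR fragment below is hypothetical:

    import re

    gEfiVarStoreNamePattern = re.compile("\s*name\s*=\s*(\w+)")
    gEfiVarStoreGuidPattern = re.compile("\s*guid\s*=\s*({.*?{.*?}\s*})")

    # Hypothetical preprocessed VFR fragment (one efivarstore statement on one line)
    Content = 'efivarstore SETUP_DATA, name = Setup, guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}};'
    Pos = Content.find('efivarstore')
    Name = gEfiVarStoreNamePattern.search(Content, Pos)
    Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
    print(Name.group(1))  # Setup
    print(Guid.group(1))  # {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}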
@@ -60,22 +72,40 @@ gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
+gInfSpecVersion = "0x00010017"
+
#
# Template string to generic AsBuilt INF
#
gAsBuiltInfHeaderString = TemplateString("""${header_comments}
+# DO NOT EDIT
+# FILE auto-generated
+
[Defines]
- INF_VERSION = 0x00010016
+ INF_VERSION = ${module_inf_version}
BASE_NAME = ${module_name}
FILE_GUID = ${module_guid}
- MODULE_TYPE = ${module_module_type}
- VERSION_STRING = ${module_version_string}${BEGIN}
+ MODULE_TYPE = ${module_module_type}${BEGIN}
+ VERSION_STRING = ${module_version_string}${END}${BEGIN}
PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
- PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}
-
-[Packages]${BEGIN}
+ PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
+ ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
+ UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
+ CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
+ DESTRUCTOR = ${module_destructor}${END}${BEGIN}
+ SHADOW = ${module_shadow}${END}${BEGIN}
+ PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
+ PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
+ PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
+ PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
+ BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
+ SPEC = ${module_spec}${END}${BEGIN}
+ UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
+ MODULE_UNI_FILE = ${module_uni_file}${END}
+
+[Packages.${module_arch}]${BEGIN}
${package_item}${END}
[Binaries.${module_arch}]${BEGIN}
@@ -84,19 +114,32 @@ gAsBuiltInfHeaderString = TemplateString("""${header_comments}
[PatchPcd.${module_arch}]${BEGIN}
${patchablepcd_item}
${END}
+
[Protocols.${module_arch}]${BEGIN}
${protocol_item}
${END}
+
[Ppis.${module_arch}]${BEGIN}
${ppi_item}
${END}
+
[Guids.${module_arch}]${BEGIN}
${guid_item}
${END}
+
[PcdEx.${module_arch}]${BEGIN}
${pcd_item}
${END}
+[LibraryClasses.${module_arch}]
+## @LIB_INSTANCES${BEGIN}
+# ${libraryclasses_item}${END}
+
+${depexsection_item}
+
+${tail_comments}
+
+[BuildOptions.${module_arch}]
## @AsBuilt${BEGIN}
## ${flags_item}${END}
""")
@@ -240,6 +283,15 @@ class WorkspaceAutoGen(AutoGen):
ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"
% (self.BuildTarget, " ".join(self.Platform.BuildTargets)))
+ # Validate SKU ID
+ if not self.SkuId:
+ self.SkuId = 'DEFAULT'
+
+ if self.SkuId not in self.Platform.SkuIds:
+ EdkLogger.error("build", PARAMETER_INVALID,
+ ExtraData="SKU-ID [%s] is not supported by the platform. [Valid SKU-ID: %s]"
+ % (self.SkuId, " ".join(self.Platform.SkuIds.keys())))
+
# parse FDF file to get PCDs in it, if any
if not self.FdfFile:
self.FdfFile = self.Platform.FlashDefinition
@@ -299,6 +351,7 @@ class WorkspaceAutoGen(AutoGen):
DecPcds = {}
DecPcdsKey = set()
PGen = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)
+ # Collect the package set information from the INFs listed in the FDF
PkgSet = set()
for Inf in ModuleList:
ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
@@ -355,10 +408,10 @@ class WorkspaceAutoGen(AutoGen):
# Check PCD type and definition between DSC and DEC
#
self._CheckPcdDefineAndType()
-
- if self.FdfFile:
- self._CheckDuplicateInFV(Fdf)
-
+
+# if self.FdfFile:
+# self._CheckDuplicateInFV(Fdf)
+
self._BuildDir = None
self._FvDir = None
self._MakeFileDir = None
@@ -662,7 +715,7 @@ class WorkspaceAutoGen(AutoGen):
ExtraData=None
)
Count += 1
-
+ ## Generate fds command
def _GenFdsCommand(self):
return (GenMake.TopLevelMakefile(self)._TEMPLATE_.Replace(GenMake.TopLevelMakefile(self)._TemplateDict)).strip()
@@ -817,9 +870,6 @@ class PlatformAutoGen(AutoGen):
# get library/modules for build
self.LibraryBuildDirectoryList = []
self.ModuleBuildDirectoryList = []
- # get the original module/package/platform objects
- self.LibraryBuildDirectoryList = []
- self.ModuleBuildDirectoryList = []
return True
def __repr__(self):
@@ -859,7 +909,7 @@ class PlatformAutoGen(AutoGen):
Ma = ModuleAutoGen(self.Workspace, ModuleFile, self.BuildTarget,
self.ToolChain, self.Arch, self.MetaFile)
Ma.CreateMakeFile(True)
- Ma.CreateAsBuiltInf()
+ #Ma.CreateAsBuiltInf()
# no need to create makefile for the platform more than once
if self.IsMakeFileCreated:
@@ -923,8 +973,11 @@ class PlatformAutoGen(AutoGen):
if PcdFromModule.DatumType == "VOID*" and PcdFromModule.MaxDatumSize in [None, '']:
NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, F))
+ # Check whether the PCD comes from a binary INF or a source INF
if M.IsBinaryModule == True:
PcdFromModule.IsFromBinaryInf = True
+
+ # Check whether the PCD is also defined in the DSC
if (PcdFromModule.TokenCName, PcdFromModule.TokenSpaceGuidCName) in self.Platform.Pcds.keys():
PcdFromModule.IsFromDsc = True
else:
@@ -943,11 +996,11 @@ class PlatformAutoGen(AutoGen):
PcdFromModule.IsFromBinaryInf == False:
# Print warning message to let the developer make a determine.
if PcdFromModule not in PcdNotInDb:
- EdkLogger.warn("build",
- "A PCD listed in the DSC (%s.%s, %s) is used by a module not in the FDF. If the PCD is not used by any module listed in the FDF this PCD will be ignored. " \
- % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, self.Platform.MetaFile.Path),
- File=self.MetaFile, \
- ExtraData=None)
+ # EdkLogger.warn("build",
+ # "A PCD listed in the DSC (%s.%s, %s) is used by a module not in the FDF. If the PCD is not used by any module listed in the FDF this PCD will be ignored. " \
+ # % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, self.Platform.MetaFile.Path),
+ # File=self.MetaFile, \
+ # ExtraData=None)
PcdNotInDb.append(PcdFromModule)
continue
# If one of the Source built modules listed in the DSC is not listed in
@@ -1017,6 +1070,11 @@ class PlatformAutoGen(AutoGen):
elif PcdFromModule not in self._NonDynaPcdList_ and PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE:
self._NonDynaPcdList_.append(PcdFromModule)
if PcdFromModule in self._DynaPcdList_ and PcdFromModule.Phase == 'PEI' and PcdFromModule.Type in GenC.gDynamicExPcd:
+ # If the PCD is already in the dynamic PCD list and this occurrence's Phase is PEI,
+ # overwrite the phase of the existing entry. This handles a dynamic PCD used by a
+ # PEI module and a DXE module at the same time.
+ # Also overwrite the type recorded from the source INF with the DynamicEx type
+ # taken from the As Built INF.
Index = self._DynaPcdList_.index(PcdFromModule)
self._DynaPcdList_[Index].Phase = PcdFromModule.Phase
self._DynaPcdList_[Index].Type = PcdFromModule.Type
@@ -1050,16 +1108,16 @@ class PlatformAutoGen(AutoGen):
# If PCD is listed in a PcdsDynamicHii, PcdsDynamicExHii, PcdsDynamicHii or PcdsDynamicExHii
# section, and the PCD is not used by any module that is listed in the DSC file, the build
# provide a warning message.
- for PcdKey in self.Platform.Pcds.keys():
- Pcd = self.Platform.Pcds[PcdKey]
- if Pcd not in self._DynamicPcdList + PcdNotInDb and \
- Pcd.Type in [TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_DEFAULT, TAB_PCDS_DYNAMIC_HII, TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, TAB_PCDS_DYNAMIC_EX_HII]:
- # Print warning message to let the developer make a determine.
- EdkLogger.warn("build",
- "A %s PCD listed in the DSC (%s.%s, %s) is not used by any module." \
- % (Pcd.Type, Pcd.TokenSpaceGuidCName, Pcd.TokenCName, self.Platform.MetaFile.Path),
- File=self.MetaFile, \
- ExtraData=None)
+ #for PcdKey in self.Platform.Pcds.keys():
+ # Pcd = self.Platform.Pcds[PcdKey]
+ # if Pcd not in self._DynamicPcdList + PcdNotInDb and \
+ # Pcd.Type in [TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_DEFAULT, TAB_PCDS_DYNAMIC_HII, TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, TAB_PCDS_DYNAMIC_EX_HII]:
+ # # Print warning message to let the developer make a determine.
+ #EdkLogger.warn("build",
+ # "A %s PCD listed in the DSC (%s.%s, %s) is not used by any module." \
+ # % (Pcd.Type, Pcd.TokenSpaceGuidCName, Pcd.TokenCName, self.Platform.MetaFile.Path),
+ # File=self.MetaFile, \
+ # ExtraData=None)
#
# Sort dynamic PCD list to:
# 1) If PCD's datum type is VOID* and value is unicode string which starts with L, the PCD item should
@@ -1421,7 +1479,7 @@ class PlatformAutoGen(AutoGen):
self._EdkIIBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)
return self._EdkIIBuildOption
- ## Parse build_rule.txt in $(WORKSPACE)/Conf/build_rule.txt
+ ## Parse build_rule.txt in Conf Directory.
#
# @retval BuildRule object
#
@@ -1431,7 +1489,7 @@ class PlatformAutoGen(AutoGen):
if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:
BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
if BuildRuleFile in [None, '']:
- BuildRuleFile = gBuildRuleFile
+ BuildRuleFile = gDefaultBuildRuleFile
self._BuildRule = BuildRule(BuildRuleFile)
if self._BuildRule._FileVersion == "":
self._BuildRule._FileVersion = AutoGenReqBuildRuleVerNum
@@ -2140,6 +2198,9 @@ class ModuleAutoGen(AutoGen):
return False
self.SourceDir = self.MetaFile.SubDir
+ if self.SourceDir.upper().find(self.WorkspaceDir.upper()) == 0:
+ self.SourceDir = self.SourceDir[len(self.WorkspaceDir) + 1:]
+
self.SourceOverrideDir = None
# use overrided path defined in DSC file
if self.MetaFile.Key in GlobalData.gOverrideDir:
@@ -2284,6 +2345,16 @@ class ModuleAutoGen(AutoGen):
## Return the module meta-file GUID
def _GetGuid(self):
+ #
+ # To build the same module more than once, the INF with FILE_GUID overridden gets the
+ # file name <FILE_GUID>module.inf, while the relative path (self.MetaFile.File) is still the real
+ # path in the DSC. The overriding GUID can therefore be retrieved from the file name.
+ #
+ if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
+ #
+ # Length of GUID is 36
+ #
+ return os.path.basename(self.MetaFile.Path)[:36]
return self.Module.Guid
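A quick sketch of the naming convention relied on above; the path is illustrative and reuses the example GUID from the ProcessDuplicatedInf comments in Common/Misc.py:

    import os

    # The temporary INF created for a FILE_GUID override is named <FILE_GUID><BaseName>.inf,
    # so the first 36 characters of its base name are the overriding GUID.
    Path = 'Conf/.cache/0D1B936F-68F3-4589-AFCC-FB8B7AEBC836module.inf'
    print(os.path.basename(Path)[:36])  # 0D1B936F-68F3-4589-AFCC-FB8B7AEBC836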
## Return the module version
@@ -2393,7 +2464,66 @@ class ModuleAutoGen(AutoGen):
continue
PackageList.append(Package)
return PackageList
-
+
+ ## Get the depex string
+ #
+ # @return : a string containing all depex expressions.
+ def _GetDepexExpresionString(self):
+ DepexStr = ''
+ DepexList = []
+ ## DPX_SOURCE IN Define section.
+ if self.Module.DxsFile:
+ return DepexStr
+ for M in [self.Module] + self.DependentLibraryList:
+ Filename = M.MetaFile.Path
+ InfObj = InfSectionParser.InfSectionParser(Filename)
+ DepexExpresionList = InfObj.GetDepexExpresionList()
+ for DepexExpresion in DepexExpresionList:
+ for key in DepexExpresion.keys():
+ Arch, ModuleType = key
+ # If the module type is USER_DEFINED, every distinct DEPEX section tag
+ # is copied into the As Built INF file, and each keeps its own separate
+ # DEPEX section tag.
+ if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED:
+ if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
+ DepexList.append({(Arch, ModuleType): DepexExpresion[key][:]})
+ else:
+ if Arch.upper() == TAB_ARCH_COMMON or \
+ (Arch.upper() == self.Arch.upper() and \
+ ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
+ DepexList.append({(Arch, ModuleType): DepexExpresion[key][:]})
+
+ # The module type is USER_DEFINED.
+ if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED:
+ for Depex in DepexList:
+ for key in Depex.keys():
+ DepexStr += '[Depex.%s.%s]\n' % key
+ DepexStr += '\n'.join(['# '+ val for val in Depex[key]])
+ DepexStr += '\n\n'
+ if not DepexStr:
+ return '[Depex.%s]\n' % self.Arch
+ return DepexStr
+
+ # The module type is not USER_DEFINED.
+ Count = 0
+ for Depex in DepexList:
+ Count += 1
+ if DepexStr != '':
+ DepexStr += ' AND '
+ DepexStr += '('
+ for D in Depex.values():
+ DepexStr += ' '.join([val for val in D])
+ Index = DepexStr.find('END')
+ if Index > -1 and Index == len(DepexStr) - 3:
+ DepexStr = DepexStr[:-3]
+ DepexStr = DepexStr.strip()
+ DepexStr += ')'
+ if Count == 1:
+ DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
+ if not DepexStr:
+ return '[Depex.%s]\n' % self.Arch
+ return '[Depex.%s]\n# ' % self.Arch + DepexStr
+
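A minimal sketch, with hypothetical depex token lists, of the merge performed above for modules that are not USER_DEFINED: every per-INF expression is parenthesized and AND-ed together, and a single expression keeps no outer parentheses:

    DepexList = [['gEfiPcdProtocolGuid'],
                 ['gEfiHiiDatabaseProtocolGuid', 'AND', 'gEfiHiiStringProtocolGuid']]
    Merged = ' AND '.join('(%s)' % ' '.join(Tokens) for Tokens in DepexList)
    if len(DepexList) == 1:
        Merged = Merged.lstrip('(').rstrip(')').strip()
    print('[Depex.IA32]\n# ' + Merged)
    # prints:
    #   [Depex.IA32]
    #   # (gEfiPcdProtocolGuid) AND (gEfiHiiDatabaseProtocolGuid AND gEfiHiiStringProtocolGuid)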
## Merge dependency expression
#
# @retval list The token list of the dependency expression after parsed
@@ -2546,7 +2676,7 @@ class ModuleAutoGen(AutoGen):
## Return a list of files which can be built from source
#
# What kind of files can be built is determined by build rules in
- # $(WORKSPACE)/Conf/build_rule.txt and toolchain family.
+ # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
#
def _GetSourceFileList(self):
if self._SourceFileList == None:
@@ -2901,6 +3031,71 @@ class ModuleAutoGen(AutoGen):
self._IncludePathList.append(str(Inc))
return self._IncludePathList
+ ## Get HII EX PCDs which may be used by VFR
+ #
+ # The efivarstore statements in VFR may relate to HII EX PCDs.
+ # Get the variable name and GUID from each efivarstore and from each HII EX PCD,
+ # and list the HII EX PCDs in the As Built INF when both the name and the GUID match.
+ #
+ # @retval list HII EX PCDs
+ #
+ def _GetPcdsMaybeUsedByVfr(self):
+ if not self.SourceFileList:
+ return []
+
+ NameGuids = []
+ for SrcFile in self.SourceFileList:
+ if SrcFile.Ext.lower() != '.vfr':
+ continue
+ Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
+ if not os.path.exists(Vfri):
+ continue
+ VfriFile = open(Vfri, 'r')
+ Content = VfriFile.read()
+ VfriFile.close()
+ Pos = Content.find('efivarstore')
+ while Pos != -1:
+ #
+ # Make sure 'efivarstore' is the start of an efivarstore statement,
+ # in case the value of 'name' (name = efivarstore) is itself 'efivarstore'.
+ #
+ Index = Pos - 1
+ while Index >= 0 and Content[Index] in ' \t\r\n':
+ Index -= 1
+ if Index >= 0 and Content[Index] != ';':
+ Pos = Content.find('efivarstore', Pos + len('efivarstore'))
+ continue
+ #
+ # 'efivarstore' must be followed by name and guid
+ #
+ Name = gEfiVarStoreNamePattern.search(Content, Pos)
+ if not Name:
+ break
+ Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
+ if not Guid:
+ break
+ NameArray = ConvertStringToByteArray('L"' + Name.group(1) + '"')
+ NameGuids.append((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
+ Pos = Content.find('efivarstore', Name.end())
+ if not NameGuids:
+ return []
+ HiiExPcds = []
+ for Pcd in self.PlatformInfo.Platform.Pcds.values():
+ if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
+ continue
+ for SkuName in Pcd.SkuInfoList:
+ SkuInfo = Pcd.SkuInfoList[SkuName]
+ Name = ConvertStringToByteArray(SkuInfo.VariableName)
+ Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList)
+ if not Value:
+ continue
+ Guid = GuidStructureStringToGuidString(Value)
+ if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
+ HiiExPcds.append(Pcd)
+ break
+
+ return HiiExPcds
+
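A hedged usage sketch of the (name, GUID) pair built above for each efivarstore; it assumes the interpreter is started from BaseTools/Source/Python with this patch applied, and the variable name and GUID are illustrative:

    from Common.Misc import ConvertStringToByteArray, GuidStructureStringToGuidString

    Name = ConvertStringToByteArray('L"Setup"')   # '{83,101,116,117,112,0}'
    Guid = GuidStructureStringToGuidString(
        '{0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}')
    # The (Name, Guid) pair is compared against each DynamicExHii PCD's SkuInfo variable
    # name and GUID; matching PCDs are listed in the [PcdEx] section of the As Built INF.
    print((Name, Guid))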
## Create AsBuilt INF file the module
#
def CreateAsBuiltInf(self):
@@ -2963,6 +3158,16 @@ class ModuleAutoGen(AutoGen):
break
if Found: break
+ VfrPcds = self._GetPcdsMaybeUsedByVfr()
+ for Pkg in self.PlatformInfo.PackageList:
+ if Pkg in Packages:
+ continue
+ for VfrPcd in VfrPcds:
+ if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, 'DynamicEx') in Pkg.Pcds or
+ (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, 'Dynamic') in Pkg.Pcds):
+ Packages += [Pkg]
+ break
+
ModuleType = self.ModuleType
if ModuleType == 'UEFI_DRIVER' and self.DepexGenerated:
ModuleType = 'DXE_DRIVER'
@@ -2971,14 +3176,30 @@ class ModuleAutoGen(AutoGen):
if self.PcdIsDriver != '':
DriverType = self.PcdIsDriver
+ Guid = self.Guid
+ MDefs = self.Module.Defines
+
AsBuiltInfDict = {
'module_name' : self.Name,
- 'module_guid' : self.Guid,
+ 'module_guid' : Guid,
'module_module_type' : ModuleType,
- 'module_version_string' : self.Version,
+ 'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
'pcd_is_driver_string' : [],
'module_uefi_specification_version' : [],
'module_pi_specification_version' : [],
+ 'module_entry_point' : self.Module.ModuleEntryPointList,
+ 'module_unload_image' : self.Module.ModuleUnloadImageList,
+ 'module_constructor' : self.Module.ConstructorList,
+ 'module_destructor' : self.Module.DestructorList,
+ 'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
+ 'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
+ 'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
+ 'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
+ 'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
+ 'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
+ 'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
+ 'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
+ 'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
'module_arch' : self.Arch,
'package_item' : ['%s' % (Package.MetaFile.File.replace('\\','/')) for Package in Packages],
'binary_item' : [],
@@ -2990,7 +3211,12 @@ class ModuleAutoGen(AutoGen):
'flags_item' : [],
'libraryclasses_item' : []
}
- AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
+
+ if self.AutoGenVersion > int(gInfSpecVersion, 0):
+ AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
+ else:
+ AsBuiltInfDict['module_inf_version'] = gInfSpecVersion
+
if DriverType:
AsBuiltInfDict['pcd_is_driver_string'] += [DriverType]
@@ -3036,6 +3262,8 @@ class ModuleAutoGen(AutoGen):
StartPos = Index
break
AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
+ AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)
+
GenList = [
(self.ProtocolList, self._ProtocolComments, 'protocol_item'),
(self.PpiList, self._PpiComments, 'ppi_item'),
@@ -3116,28 +3344,42 @@ class ModuleAutoGen(AutoGen):
if PcdComments:
PcdItem = PcdComments + '\n ' + PcdItem
AsBuiltInfDict['patchablepcd_item'].append(PcdItem)
- for Pcd in Pcds:
+
+ HiiPcds = []
+ for Pcd in Pcds + VfrPcds:
PcdComments = ''
PcdCommentList = []
HiiInfo = ''
+ SkuId = ''
if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
for SkuName in Pcd.SkuInfoList:
SkuInfo = Pcd.SkuInfoList[SkuName]
+ SkuId = SkuInfo.SkuId
HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
break
+ if SkuId:
+ #
+ # Don't generate duplicated HII PCD
+ #
+ if (SkuId, Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in HiiPcds:
+ continue
+ else:
+ HiiPcds.append((SkuId, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
if HiiInfo:
UsageIndex = -1
+ UsageStr = ''
for Index, Comment in enumerate(PcdCommentList):
for Usage in UsageList:
if Comment.find(Usage) != -1:
+ UsageStr = Usage
UsageIndex = Index
break
if UsageIndex != -1:
- PcdCommentList[UsageIndex] = PcdCommentList[UsageIndex] + ' ' + HiiInfo
+ PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
else:
- PcdCommentList.append('## ' + HiiInfo)
+ PcdCommentList.append('## UNDEFINED ' + HiiInfo)
PcdComments = '\n '.join(PcdCommentList)
PcdEntry = Pcd.TokenSpaceGuidCName + '.' + Pcd.TokenCName
if PcdComments:
@@ -3146,6 +3388,16 @@ class ModuleAutoGen(AutoGen):
for Item in self.BuildOption:
if 'FLAGS' in self.BuildOption[Item]:
AsBuiltInfDict['flags_item'] += ['%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip())]
+
+ # Generated LibraryClasses section in comments.
+ for Library in self.LibraryAutoGenList:
+ AsBuiltInfDict['libraryclasses_item'] += [Library.MetaFile.File.replace('\\', '/')]
+
+ # Generated depex expression section in comments.
+ AsBuiltInfDict['depexsection_item'] = ''
+ DepexExpresion = self._GetDepexExpresionString()
+ if DepexExpresion:
+ AsBuiltInfDict['depexsection_item'] = DepexExpresion
AsBuiltInf = TemplateString()
AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))
diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py
index 0ad368a4ce..50bee0dbc1 100644
--- a/BaseTools/Source/Python/AutoGen/GenMake.py
+++ b/BaseTools/Source/Python/AutoGen/GenMake.py
@@ -247,6 +247,7 @@ MODULE_FILE = ${module_file}
MODULE_FILE_BASE_NAME = ${module_file_base_name}
BASE_NAME = $(MODULE_NAME)
MODULE_RELATIVE_DIR = ${module_relative_directory}
+PACKAGE_RELATIVE_DIR = ${package_relative_directory}
MODULE_DIR = $(WORKSPACE)${separator}${module_relative_directory}
MODULE_ENTRY_POINT = ${module_entry_point}
@@ -552,6 +553,10 @@ cleanlib:
Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":os.path.join(D, MakefileName)}
LibraryMakeCommandList.append(Command)
+ package_rel_dir = self._AutoGenObject.SourceDir
+ if os.sep in package_rel_dir:
+ package_rel_dir = package_rel_dir[package_rel_dir.index(os.sep) + 1:]
+
MakefileTemplateDict = {
"makefile_header" : self._FILE_HEADER_[self._FileType],
"makefile_path" : os.path.join("$(MODULE_BUILD_DIR)", MakefileName),
@@ -569,7 +574,8 @@ cleanlib:
"module_file" : self._AutoGenObject.MetaFile.Name,
"module_file_base_name" : self._AutoGenObject.MetaFile.BaseName,
"module_relative_directory" : self._AutoGenObject.SourceDir,
- "module_extra_defines" : ["%s = %s" % (k, v) for k,v in self._AutoGenObject.Module.Defines.iteritems()],
+ "package_relative_directory": package_rel_dir,
+ "module_extra_defines" : ["%s = %s" % (k, v) for k, v in self._AutoGenObject.Module.Defines.iteritems()],
"architecture" : self._AutoGenObject.Arch,
"toolchain_tag" : self._AutoGenObject.ToolChain,
@@ -1177,7 +1183,8 @@ cleanlib:
def GetModuleBuildDirectoryList(self):
DirList = []
for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList:
- DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir))
+ if not ModuleAutoGen.IsBinaryModule:
+ DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir))
return DirList
## Get the root directory list for intermediate files of all libraries build
@@ -1187,7 +1194,8 @@ cleanlib:
def GetLibraryBuildDirectoryList(self):
DirList = []
for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
- DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))
+ if not LibraryAutoGen.IsBinaryModule:
+ DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))
return DirList
_TemplateDict = property(_CreateTemplateDict)
@@ -1200,7 +1208,7 @@ cleanlib:
#
class TopLevelMakefile(BuildFile):
## template used to generate toplevel makefile
- _TEMPLATE_ = TemplateString('''${BEGIN}\tGenFds -f ${fdf_file} -o ${platform_build_directory} -t ${toolchain_tag} -b ${build_target} -p ${active_platform} -a ${build_architecture_list} ${extra_options}${END}${BEGIN} -r ${fd} ${END}${BEGIN} -i ${fv} ${END}${BEGIN} -C ${cap} ${END}${BEGIN} -D ${macro} ${END}''')
+ _TEMPLATE_ = TemplateString('''${BEGIN}\tGenFds -f ${fdf_file} --conf=${conf_directory} -o ${platform_build_directory} -t ${toolchain_tag} -b ${build_target} -p ${active_platform} -a ${build_architecture_list} ${extra_options}${END}${BEGIN} -r ${fd} ${END}${BEGIN} -i ${fv} ${END}${BEGIN} -C ${cap} ${END}${BEGIN} -D ${macro} ${END}''')
## Constructor of TopLevelMakefile
#
@@ -1258,6 +1266,9 @@ class TopLevelMakefile(BuildFile):
if GlobalData.gCaseInsensitive:
ExtraOption += " -c"
+ if GlobalData.gIgnoreSource:
+ ExtraOption += " --ignore-sources"
+
MakefileName = self._FILE_NAME_[self._FileType]
SubBuildCommandList = []
for A in PlatformInfo.ArchList:
@@ -1272,6 +1283,7 @@ class TopLevelMakefile(BuildFile):
"platform_guid" : PlatformInfo.Guid,
"platform_version" : PlatformInfo.Version,
"platform_build_directory" : PlatformInfo.BuildDir,
+ "conf_directory" : GlobalData.gConfDirectory,
"toolchain_tag" : PlatformInfo.ToolChain,
"build_target" : PlatformInfo.BuildTarget,
@@ -1301,7 +1313,8 @@ class TopLevelMakefile(BuildFile):
def GetModuleBuildDirectoryList(self):
DirList = []
for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList:
- DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir))
+ if not ModuleAutoGen.IsBinaryModule:
+ DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir))
return DirList
## Get the root directory list for intermediate files of all libraries build
@@ -1311,7 +1324,8 @@ class TopLevelMakefile(BuildFile):
def GetLibraryBuildDirectoryList(self):
DirList = []
for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
- DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))
+ if not LibraryAutoGen.IsBinaryModule:
+ DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))
return DirList
_TemplateDict = property(_CreateTemplateDict)
diff --git a/BaseTools/Source/Python/AutoGen/InfSectionParser.py b/BaseTools/Source/Python/AutoGen/InfSectionParser.py
new file mode 100644
index 0000000000..7f78236548
--- /dev/null
+++ b/BaseTools/Source/Python/AutoGen/InfSectionParser.py
@@ -0,0 +1,107 @@
+## @file
+# Parse an INF file and get data from the specified sections.
+#
+# Copyright (c) 2007 - 2012, Intel Corporation. All rights reserved.<BR>
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+## Import Modules
+#
+
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from Common.DataType import *
+
+
+class InfSectionParser():
+ def __init__(self, FilePath):
+ self._FilePath = FilePath
+ self._FileSectionDataList = []
+ self._ParserInf()
+
+ def _ParserInf(self):
+ Filename = self._FilePath
+ FileLinesList = []
+ UserExtFind = False
+ FindEnd = True
+ FileLastLine = False
+ SectionLine = ''
+ SectionData = []
+
+ try:
+ FileLinesList = open(Filename, "r", 0).readlines()
+ except BaseException:
+ EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % Filename)
+
+ for Index in range(0, len(FileLinesList)):
+ line = str(FileLinesList[Index]).strip()
+ if Index + 1 == len(FileLinesList):
+ FileLastLine = True
+ NextLine = ''
+ else:
+ NextLine = str(FileLinesList[Index + 1]).strip()
+ if UserExtFind and FindEnd == False:
+ if line:
+ SectionData.append(line)
+ if line.lower().startswith(TAB_SECTION_START) and line.lower().endswith(TAB_SECTION_END):
+ SectionLine = line
+ UserExtFind = True
+ FindEnd = False
+
+ if (NextLine != '' and NextLine[0] == TAB_SECTION_START and \
+ NextLine[-1] == TAB_SECTION_END) or FileLastLine:
+ UserExtFind = False
+ FindEnd = True
+ self._FileSectionDataList.append({SectionLine: SectionData[:]})
+ SectionData = []
+ SectionLine = ''
+
+
+ # Get depex expressions
+ #
+ # @return: a list of dictionaries; each key is an (arch, module type) tuple and each value is a list of that section's lines.
+ def GetDepexExpresionList(self):
+ DepexExpresionList = []
+ if not self._FileSectionDataList:
+ return DepexExpresionList
+ for SectionDataDict in self._FileSectionDataList:
+ for key in SectionDataDict.keys():
+ if key.lower() == "[depex]" or key.lower().startswith("[depex."):
+ SectionLine = key.lstrip(TAB_SECTION_START).rstrip(TAB_SECTION_END)
+ SubSectionList = [SectionLine]
+ if str(SectionLine).find(TAB_COMMA_SPLIT) > -1:
+ SubSectionList = str(SectionLine).split(TAB_COMMA_SPLIT)
+ for SubSection in SubSectionList:
+ SectionList = SubSection.split(TAB_SPLIT)
+ SubKey = ()
+ if len(SectionList) == 1:
+ SubKey = (TAB_ARCH_COMMON, TAB_ARCH_COMMON)
+ elif len(SectionList) == 2:
+ SubKey = (SectionList[1], TAB_ARCH_COMMON)
+ elif len(SectionList) == 3:
+ SubKey = (SectionList[1], SectionList[2])
+ else:
+ EdkLogger.error("build", AUTOGEN_ERROR, 'Section %s is invalid.' % key)
+ DepexExpresionList.append({SubKey: SectionDataDict[key]})
+ return DepexExpresionList
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
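A hedged usage sketch of the parser above; Sample.inf is a hypothetical INF and the import assumes the script runs from BaseTools/Source/Python/AutoGen. For a header such as [Depex.IA32.PEIM] the returned key is ('IA32', 'PEIM'), while a plain [Depex] yields (TAB_ARCH_COMMON, TAB_ARCH_COMMON):

    import InfSectionParser

    Parser = InfSectionParser.InfSectionParser('Sample.inf')
    for Entry in Parser.GetDepexExpresionList():
        for (Arch, ModuleType), Lines in Entry.items():
            print('%s / %s : %s' % (Arch, ModuleType, ' '.join(Lines)))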
diff --git a/BaseTools/Source/Python/AutoGen/StrGather.py b/BaseTools/Source/Python/AutoGen/StrGather.py
index 20af015bcf..48c396a9aa 100644
--- a/BaseTools/Source/Python/AutoGen/StrGather.py
+++ b/BaseTools/Source/Python/AutoGen/StrGather.py
@@ -1,3 +1,7 @@
+## @file
+# This file is used to parse a strings file and create or add to a string database
+# file.
+#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -7,10 +11,6 @@
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-#This file is used to parse a strings file and create or add to a string database file.
-#
-
##
# Import Modules
#
diff --git a/BaseTools/Source/Python/AutoGen/UniClassObject.py b/BaseTools/Source/Python/AutoGen/UniClassObject.py
index e676ad6ec8..58f865ed8b 100644
--- a/BaseTools/Source/Python/AutoGen/UniClassObject.py
+++ b/BaseTools/Source/Python/AutoGen/UniClassObject.py
@@ -1,3 +1,6 @@
+## @file
+# This file is used to collect all defined strings in multiple uni files
+#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -7,10 +10,6 @@
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-#This file is used to collect all defined strings in multiple uni files
-#
-
##
# Import Modules
#
diff --git a/BaseTools/Source/Python/Common/DataType.py b/BaseTools/Source/Python/Common/DataType.py
index f1408569e0..57e5063ff2 100644
--- a/BaseTools/Source/Python/Common/DataType.py
+++ b/BaseTools/Source/Python/Common/DataType.py
@@ -373,7 +373,7 @@ TAB_INF_GUIDTYPE_TSG = 'TokenSpaceGuid'
TAB_INF_GUIDTYPE_VAR = 'Variable'
TAB_INF_GUIDTYPE_PROTOCOL = 'PROTOCOL'
TAB_INF_GUIDTYPE_PPI = 'PPI'
-TAB_INF_GUIDTYPE_UNDEFINED = 'UNDEFINED'
+TAB_INF_USAGE_UNDEFINED = 'UNDEFINED'
#
# Dec Definitions
diff --git a/BaseTools/Source/Python/Common/GlobalData.py b/BaseTools/Source/Python/Common/GlobalData.py
index 1f9d91c5d8..218034ba33 100644
--- a/BaseTools/Source/Python/Common/GlobalData.py
+++ b/BaseTools/Source/Python/Common/GlobalData.py
@@ -1,7 +1,7 @@
## @file
# This file is used to define common static strings used by INF/DEC/DSC files
#
-# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -69,3 +69,11 @@ gIgnoreSource = False
# FDF parser
#
gFdfParser = None
+
+#
+# If a module is built more than once with different PCDs or library classes,
+# a temporary INF file with the same content is created. The temporary file is
+# removed when the build exits.
+#
+gTempInfs = []
+
diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py
index e9e41de02e..01297cd0a9 100644
--- a/BaseTools/Source/Python/Common/Misc.py
+++ b/BaseTools/Source/Python/Common/Misc.py
@@ -23,6 +23,7 @@ import time
import re
import cPickle
import array
+import shutil
from UserDict import IterableUserDict
from UserList import UserList
@@ -43,6 +44,90 @@ gFileTimeStampCache = {} # {file path : file time stamp}
## Dictionary used to store dependencies of files
gDependencyDatabase = {} # arch : {file path : [dependent files list]}
+## Routine to process duplicated INF
+#
+# This function is called in the following two cases:
+# Case 1 in DSC:
+# [components.arch]
+# Pkg/module/module.inf
+# Pkg/module/module.inf {
+# <Defines>
+# FILE_GUID = 0D1B936F-68F3-4589-AFCC-FB8B7AEBC836
+# }
+# Case 2 in FDF:
+# INF Pkg/module/module.inf
+# INF FILE_GUID = 0D1B936F-68F3-4589-AFCC-FB8B7AEBC836 Pkg/module/module.inf
+#
+# This function copies Pkg/module/module.inf to
+# Conf/.cache/0D1B936F-68F3-4589-AFCC-FB8B7AEBC836module.inf
+#
+# @param Path Original PathClass object
+# @param BaseName New file base name
+#
+# @retval return the new PathClass object
+#
+def ProcessDuplicatedInf(Path, BaseName, Workspace):
+ Filename = os.path.split(Path.File)[1]
+ if '.' in Filename:
+ Filename = BaseName + Path.BaseName + Filename[Filename.rfind('.'):]
+ else:
+ Filename = BaseName + Path.BaseName
+
+ #
+ # If -N is specified on the command line, the cache is disabled,
+ # so the directory has to be created here.
+ #
+ DbDir = os.path.split(GlobalData.gDatabasePath)[0]
+ if not os.path.exists(DbDir):
+ os.makedirs(DbDir)
+ #
+ # A temporary INF is copied to the database path, which must have write permission.
+ # The temporary file will be removed at the end of the build.
+ # To avoid name conflicts, the temporary file is named
+ # <FILE_GUID><BaseName> (0D1B936F-68F3-4589-AFCC-FB8B7AEBC836module.inf)
+ #
+ TempFullPath = os.path.join(DbDir,
+ Filename)
+ RtPath = PathClass(Path.File, Workspace)
+ #
+ # Modify the full path to temporary path, keep other unchanged
+ #
+ # To build the same module more than once, the INF with FILE_GUID overridden gets the
+ # file name <FILE_GUID>module.inf, while the relative path (self.MetaFile.File) stays the real
+ # path in the DSC, which C files and other files in the INF use as their relative path.
+ # The trick used here: all module paths are PathClass instances, and after the PathClass is
+ # initialized, PathClass.Path is overridden with the temporary INF path.
+ #
+ # The reason for creating a temporary INF is:
+ # Platform.Modules, the base used to create ModuleAutoGen objects, is a dictionary whose
+ # key is the full path of the INF and whose value is an object saving the overridden
+ # library instances and PCDs. A different key is needed for the same module to create a
+ # different output directory and to retrieve the overridden PCDs and library instances.
+ #
+ # The BaseName is the FILE_GUID which is also the output directory name.
+ #
+ #
+ RtPath.Path = TempFullPath
+ RtPath.BaseName = BaseName
+ #
+ # If file exists, compare contents
+ #
+ if os.path.exists(TempFullPath):
+ with open(str(Path), 'rb') as f1: Src = f1.read()
+ with open(TempFullPath, 'rb') as f2: Dst = f2.read()
+ if Src == Dst:
+ return RtPath
+ GlobalData.gTempInfs.append(TempFullPath)
+ shutil.copy2(str(Path), TempFullPath)
+ return RtPath
+
+## Remove the temporarily created INFs whose paths were saved in gTempInfs
+#
+def ClearDuplicatedInf():
+ for File in GlobalData.gTempInfs:
+ if os.path.exists(File):
+ os.remove(File)
+
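A small sketch of the temporary-file naming used by ProcessDuplicatedInf above; the paths are illustrative:

    import os

    BaseName = '0D1B936F-68F3-4589-AFCC-FB8B7AEBC836'   # the overriding FILE_GUID
    File = 'Pkg/module/module.inf'
    Filename = os.path.split(File)[1]                   # 'module.inf'
    Stem, Ext = os.path.splitext(Filename)
    TempName = BaseName + Stem + Ext if Ext else BaseName + Stem
    print(TempName)  # 0D1B936F-68F3-4589-AFCC-FB8B7AEBC836module.inf
    # The copy is placed next to the workspace database (Conf/.cache by default) and is
    # recorded in GlobalData.gTempInfs so ClearDuplicatedInf() can delete it after the build.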
## callback routine for processing variable option
#
# This function can be used to process variable number of option values. The
@@ -1456,6 +1541,45 @@ def CommonPath(PathList):
return os.path.sep.join(P1[:Index])
return os.path.sep.join(P1)
+#
+# Convert string to C format array
+#
+def ConvertStringToByteArray(Value):
+ Value = Value.strip()
+ if not Value:
+ return None
+ if Value[0] == '{':
+ if not Value.endswith('}'):
+ return None
+ Value = Value.replace(' ', '').replace('{', '').replace('}', '')
+ ValFields = Value.split(',')
+ try:
+ for Index in range(len(ValFields)):
+ ValFields[Index] = str(int(ValFields[Index], 0))
+ except ValueError:
+ return None
+ Value = '{' + ','.join(ValFields) + '}'
+ return Value
+
+ Unicode = False
+ if Value.startswith('L"'):
+ if not Value.endswith('"'):
+ return None
+ Value = Value[1:]
+ Unicode = True
+ elif not Value.startswith('"') or not Value.endswith('"'):
+ return None
+
+ Value = eval(Value) # translate escape character
+ NewValue = '{'
+ for Index in range(0,len(Value)):
+ if Unicode:
+ NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
+ else:
+ NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
+ Value = NewValue + '0}'
+ return Value
+
class PathClass(object):
def __init__(self, File='', Root='', AlterRoot='', Type='', IsBinary=False,
Arch='COMMON', ToolChainFamily='', Target='', TagName='', ToolCode=''):
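Traced expected results for the three accepted input forms of ConvertStringToByteArray above (a hedged reading of the code, not captured tool output); the import assumes the interpreter is started from BaseTools/Source/Python:

    from Common.Misc import ConvertStringToByteArray

    print(ConvertStringToByteArray('{0x01, 0x02}'))  # '{1,2}'                  C-style array, values normalized
    print(ConvertStringToByteArray('"abc"'))         # '{97,98,99,0}'           ASCII string, NUL-terminated
    print(ConvertStringToByteArray('L"Setup"'))      # '{83,101,116,117,112,0}' Unicode string, per-character code points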
diff --git a/BaseTools/Source/Python/Common/TargetTxtClassObject.py b/BaseTools/Source/Python/Common/TargetTxtClassObject.py
index 89e3937086..387e515230 100644
--- a/BaseTools/Source/Python/Common/TargetTxtClassObject.py
+++ b/BaseTools/Source/Python/Common/TargetTxtClassObject.py
@@ -21,7 +21,7 @@ from BuildToolError import *
import GlobalData
from Common.LongFilePathSupport import OpenLongFilePath as open
-gDefaultTargetTxtFile = "Conf/target.txt"
+gDefaultTargetTxtFile = "target.txt"
## TargetTxtClassObject
#
@@ -44,6 +44,7 @@ class TargetTxtClassObject(object):
DataType.TAB_TAT_DEFINES_TARGET_ARCH : [],
DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF : '',
}
+ self.ConfDirectoryPath = ""
if Filename != None:
self.LoadTargetTxtFile(Filename)
@@ -78,7 +79,8 @@ class TargetTxtClassObject(object):
def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
F = None
try:
- F = open(FileName,'r')
+ F = open(FileName, 'r')
+ self.ConfDirectoryPath = os.path.dirname(FileName)
except:
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
if F != None:
@@ -99,6 +101,26 @@ class TargetTxtClassObject(object):
if Key in [DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM, DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF, \
DataType.TAB_TAT_DEFINES_ACTIVE_MODULE, DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF]:
self.TargetTxtDictionary[Key] = Value.replace('\\', '/')
+ if Key == DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.TargetTxtDictionary[Key]:
+ if self.TargetTxtDictionary[Key].startswith("Conf/"):
+ Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
+ if not os.path.exists(Tools_Def) or not os.path.isfile(Tools_Def):
+ # If <ConfDirectoryPath>/Conf/<file> does not exist, fall back to <ConfDirectoryPath>/<file>
+ Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].replace("Conf/", "", 1).strip())
+ else:
+ # The File pointed to by TOOL_CHAIN_CONF is not in a Conf/ directory
+ Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
+ self.TargetTxtDictionary[Key] = Tools_Def
+ if Key == DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF and self.TargetTxtDictionary[Key]:
+ if self.TargetTxtDictionary[Key].startswith("Conf/"):
+ Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
+ if not os.path.exists(Build_Rule) or not os.path.isfile(Build_Rule):
+ # If <ConfDirectoryPath>/Conf/<file> does not exist, fall back to <ConfDirectoryPath>/<file>
+ Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].replace("Conf/", "", 1).strip())
+ else:
+ # The File pointed to by BUILD_RULE_CONF is not in a Conf/ directory
+ Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
+ self.TargetTxtDictionary[Key] = Build_Rule
elif Key in [DataType.TAB_TAT_DEFINES_TARGET, DataType.TAB_TAT_DEFINES_TARGET_ARCH, \
DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]:
self.TargetTxtDictionary[Key] = Value.split()
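A worked example of the fallback implemented above; the paths are hypothetical:

    import os

    ConfDirectoryPath = '/ws/MyConf'
    Value = 'Conf/tools_def.txt'                 # TOOL_CHAIN_CONF as written in target.txt
    Candidate = os.path.join(ConfDirectoryPath, Value)
    if not os.path.isfile(Candidate):
        # /ws/MyConf/Conf/tools_def.txt is missing, so fall back to /ws/MyConf/tools_def.txt
        Candidate = os.path.join(ConfDirectoryPath, Value.replace("Conf/", "", 1))
    print(Candidate)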
@@ -144,15 +166,15 @@ class TargetTxtClassObject(object):
print Item
## TargetTxtDict
#
-# Load target.txt in input workspace dir
+# Load target.txt in input Conf dir
#
-# @param WorkSpace: Workspace dir
+# @param ConfDir: Conf dir
#
# @retval Target An instance of TargetTxtClassObject() with loaded target.txt
#
-def TargetTxtDict(WorkSpace):
+def TargetTxtDict(ConfDir):
Target = TargetTxtClassObject()
- Target.LoadTargetTxtFile(os.path.normpath(os.path.join(WorkSpace, gDefaultTargetTxtFile)))
+ Target.LoadTargetTxtFile(os.path.normpath(os.path.join(ConfDir, gDefaultTargetTxtFile)))
return Target
##
diff --git a/BaseTools/Source/Python/Common/ToolDefClassObject.py b/BaseTools/Source/Python/Common/ToolDefClassObject.py
index 583d51b513..4fefbd91e0 100644
--- a/BaseTools/Source/Python/Common/ToolDefClassObject.py
+++ b/BaseTools/Source/Python/Common/ToolDefClassObject.py
@@ -29,7 +29,7 @@ from Common.LongFilePathSupport import OpenLongFilePath as open
gMacroRefPattern = re.compile('(DEF\([^\(\)]+\))')
gEnvRefPattern = re.compile('(ENV\([^\(\)]+\))')
gMacroDefPattern = re.compile("DEFINE\s+([^\s]+)")
-gDefaultToolsDefFile = "Conf/tools_def.txt"
+gDefaultToolsDefFile = "tools_def.txt"
## ToolDefClassObject
#
@@ -196,18 +196,23 @@ class ToolDefClassObject(object):
## ToolDefDict
#
-# Load tools_def.txt in input workspace dir
+# Load tools_def.txt in input Conf dir
#
-# @param WorkSpace: Workspace dir
+# @param ConfDir: Conf dir
#
# @retval ToolDef An instance of ToolDefClassObject() with loaded tools_def.txt
#
-def ToolDefDict(WorkSpace):
- Target = TargetTxtDict(WorkSpace)
+def ToolDefDict(ConfDir):
+ Target = TargetTxtDict(ConfDir)
ToolDef = ToolDefClassObject()
if DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF in Target.TargetTxtDictionary:
- gDefaultToolsDefFile = Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF]
- ToolDef.LoadToolDefFile(os.path.normpath(os.path.join(WorkSpace, gDefaultToolsDefFile)))
+ ToolsDefFile = Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF]
+ if ToolsDefFile:
+ ToolDef.LoadToolDefFile(os.path.normpath(ToolsDefFile))
+ else:
+ ToolDef.LoadToolDefFile(os.path.normpath(os.path.join(ConfDir, gDefaultToolsDefFile)))
+ else:
+ ToolDef.LoadToolDefFile(os.path.normpath(os.path.join(ConfDir, gDefaultToolsDefFile)))
return ToolDef
##
diff --git a/BaseTools/Source/Python/CommonDataClass/DataClass.py b/BaseTools/Source/Python/CommonDataClass/DataClass.py
index 2cc09ff333..3f6c3c65a5 100644
--- a/BaseTools/Source/Python/CommonDataClass/DataClass.py
+++ b/BaseTools/Source/Python/CommonDataClass/DataClass.py
@@ -1,7 +1,7 @@
## @file
# This file is used to define class for data structure used in ECC
#
-# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -29,6 +29,7 @@ MODEL_FILE_DSC = 1013
MODEL_FILE_FDF = 1014
MODEL_FILE_INC = 1015
MODEL_FILE_CIF = 1016
+MODEL_FILE_UNI = 1017
MODEL_FILE_OTHERS = 1099
MODEL_IDENTIFIER_FILE_HEADER = 2001
diff --git a/BaseTools/Source/Python/GenFds/CompressSection.py b/BaseTools/Source/Python/GenFds/CompressSection.py
index 9d9c9cfdf7..fac58d14f8 100644
--- a/BaseTools/Source/Python/GenFds/CompressSection.py
+++ b/BaseTools/Source/Python/GenFds/CompressSection.py
@@ -18,7 +18,7 @@
from Ffs import Ffs
import Section
import subprocess
-import os
+import Common.LongFilePathOs as os
from GenFdsGlobalVariable import GenFdsGlobalVariable
from CommonDataClass.FdfClass import CompressSectionClassObject
diff --git a/BaseTools/Source/Python/GenFds/DepexSection.py b/BaseTools/Source/Python/GenFds/DepexSection.py
index c048efe478..8f78c0fad4 100644
--- a/BaseTools/Source/Python/GenFds/DepexSection.py
+++ b/BaseTools/Source/Python/GenFds/DepexSection.py
@@ -24,6 +24,7 @@ from CommonDataClass.FdfClass import DepexSectionClassObject
from AutoGen.GenDepex import DependencyExpression
from Common import EdkLogger
from Common.BuildToolError import *
+from Common.Misc import PathClass
## generate data section
#
@@ -38,10 +39,22 @@ class DepexSection (DepexSectionClassObject):
def __FindGuidValue(self, CName):
for Arch in GenFdsGlobalVariable.ArchList:
- for PkgDb in GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform,
- Arch,
- GenFdsGlobalVariable.TargetName,
- GenFdsGlobalVariable.ToolChainTag):
+ PkgList = GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform,
+ Arch,
+ GenFdsGlobalVariable.TargetName,
+ GenFdsGlobalVariable.ToolChainTag)
+ for Inf in GenFdsGlobalVariable.FdfParser.Profile.InfList:
+ ModuleFile = PathClass(Inf, GenFdsGlobalVariable.WorkSpaceDir)
+ ModuleData = GenFdsGlobalVariable.WorkSpace.BuildObject[
+ ModuleFile,
+ Arch,
+ GenFdsGlobalVariable.TargetName,
+ GenFdsGlobalVariable.ToolChainTag
+ ]
+ for Pkg in ModuleData.Packages:
+ if Pkg not in PkgList:
+ PkgList.append(Pkg)
+ for PkgDb in PkgList:
if CName in PkgDb.Ppis:
return PkgDb.Ppis[CName]
if CName in PkgDb.Protocols:
diff --git a/BaseTools/Source/Python/GenFds/EfiSection.py b/BaseTools/Source/Python/GenFds/EfiSection.py
index 15ebac5705..8f61c4d2db 100644
--- a/BaseTools/Source/Python/GenFds/EfiSection.py
+++ b/BaseTools/Source/Python/GenFds/EfiSection.py
@@ -1,7 +1,7 @@
## @file
# process rule section generation
#
-# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -211,10 +211,10 @@ class EfiSection (EfiSectionClassObject):
"""If File List is empty"""
if FileList == [] :
if self.Optional == True:
- GenFdsGlobalVariable.VerboseLogger( "Optional Section don't exist!")
- return [], None
+ GenFdsGlobalVariable.VerboseLogger("Optional Section don't exist!")
+ return [], None
else:
- EdkLogger.error("GenFds", GENFDS_ERROR, "Output file for %s section could not be found for %s" % (SectionType, InfFileName))
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Output file for %s section could not be found for %s" % (SectionType, InfFileName))
else:
"""Convert the File to Section file one by one """
diff --git a/BaseTools/Source/Python/GenFds/FdfParser.py b/BaseTools/Source/Python/GenFds/FdfParser.py
index 01dab7bdab..f4ccda9374 100644
--- a/BaseTools/Source/Python/GenFds/FdfParser.py
+++ b/BaseTools/Source/Python/GenFds/FdfParser.py
@@ -218,6 +218,7 @@ class FdfParser:
self.CurrentFvName = None
self.__Token = ""
self.__SkippedChars = ""
+ GlobalData.gFdfParser = self
# Used to section info
self.__CurSection = []
@@ -2356,6 +2357,9 @@ class FdfParser:
ffsInf.CurrentLineNum = self.CurrentLineNumber
ffsInf.CurrentLineContent = self.__CurrentLine()
+ # Replace $(SPACE) with a real space
+ ffsInf.InfFileName = ffsInf.InfFileName.replace('$(SPACE)', ' ')
+
if ffsInf.InfFileName.replace('$(WORKSPACE)', '').find('$') == -1:
#do case sensitive check for file path
ErrorCode, ErrorInfo = PathClass(NormPath(ffsInf.InfFileName), GenFdsGlobalVariable.WorkSpaceDir).Validate()
@@ -2391,6 +2395,12 @@ class FdfParser:
# @param FfsInfObj for whom option is got
#
def __GetInfOptions(self, FfsInfObj):
+ if self.__IsKeyword("FILE_GUID"):
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextGuid():
+ raise Warning("expected GUID value", self.FileName, self.CurrentLineNumber)
+ FfsInfObj.OverrideGuid = self.__Token
if self.__IsKeyword( "RuleOverride"):
if not self.__IsToken( "="):
@@ -2426,8 +2436,8 @@ class FdfParser:
if self.__GetNextToken():
- p = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\)|\*)')
- if p.match(self.__Token):
+ p = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\))')
+ if p.match(self.__Token) and p.match(self.__Token).span()[1] == len(self.__Token):
FfsInfObj.KeyStringList.append(self.__Token)
if not self.__IsToken(","):
return
@@ -2576,7 +2586,7 @@ class FdfParser:
else:
FfsFileObj.CurrentLineNum = self.CurrentLineNumber
FfsFileObj.CurrentLineContent = self.__CurrentLine()
- FfsFileObj.FileName = self.__Token
+ FfsFileObj.FileName = self.__Token.replace('$(SPACE)', ' ')
self.__VerifyFile(FfsFileObj.FileName)
if not self.__IsToken( "}"):
diff --git a/BaseTools/Source/Python/GenFds/FfsFileStatement.py b/BaseTools/Source/Python/GenFds/FfsFileStatement.py
index a765f9e04a..cd099196d0 100644
--- a/BaseTools/Source/Python/GenFds/FfsFileStatement.py
+++ b/BaseTools/Source/Python/GenFds/FfsFileStatement.py
@@ -92,6 +92,8 @@ class FileStatement (FileStatementClassObject) :
elif self.FileName != None:
self.FileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
+ # Replace $(SPACE) with a real space
+ self.FileName = self.FileName.replace('$(SPACE)', ' ')
SectionFiles = [GenFdsGlobalVariable.MacroExtend(self.FileName, Dict)]
else:
diff --git a/BaseTools/Source/Python/GenFds/FfsInfStatement.py b/BaseTools/Source/Python/GenFds/FfsInfStatement.py
index a7a65919d7..cc85a32796 100644
--- a/BaseTools/Source/Python/GenFds/FfsInfStatement.py
+++ b/BaseTools/Source/Python/GenFds/FfsInfStatement.py
@@ -30,6 +30,7 @@ from CommonDataClass.FdfClass import FfsInfStatementClassObject
from Common.String import *
from Common.Misc import PathClass
from Common.Misc import GuidStructureByteArrayToGuidString
+from Common.Misc import ProcessDuplicatedInf
from Common import EdkLogger
from Common.BuildToolError import *
from GuidSection import GuidSection
@@ -64,6 +65,8 @@ class FfsInfStatement(FfsInfStatementClassObject):
self.CurrentLineContent = None
self.FileName = None
self.InfFileName = None
+ self.OverrideGuid = None
+ self.PatchedBinFile = ''
## GetFinalTargetSuffixMap() method
#
@@ -145,7 +148,9 @@ class FfsInfStatement(FfsInfStatementClassObject):
GenFdsGlobalVariable.VerboseLogger( " Begine parsing INf file : %s" %self.InfFileName)
self.InfFileName = self.InfFileName.replace('$(WORKSPACE)', '')
- if self.InfFileName[0] == '\\' or self.InfFileName[0] == '/' :
+ if len(self.InfFileName) > 1 and self.InfFileName[0] == '\\' and self.InfFileName[1] == '\\':
+ pass
+ elif self.InfFileName[0] == '\\' or self.InfFileName[0] == '/' :
self.InfFileName = self.InfFileName[1:]
if self.InfFileName.find('$') == -1:
@@ -164,7 +169,9 @@ class FfsInfStatement(FfsInfStatementClassObject):
ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")
if ErrorCode != 0:
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
-
+
+ if self.OverrideGuid:
+ PathClassObj = ProcessDuplicatedInf(PathClassObj, self.OverrideGuid, GenFdsGlobalVariable.WorkSpaceDir)
if self.CurrentArch != None:
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClassObj, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
@@ -199,6 +206,9 @@ class FfsInfStatement(FfsInfStatementClassObject):
"INF %s specified in FDF could not be found in build ARCH %s!" \
% (self.InfFileName, GenFdsGlobalVariable.ArchList))
+ if self.OverrideGuid:
+ self.ModuleGuid = self.OverrideGuid
+
if len(self.SourceFileList) != 0 and not self.InDsc:
EdkLogger.warn("GenFds", GENFDS_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % (self.InfFileName))
@@ -285,8 +295,8 @@ class FfsInfStatement(FfsInfStatementClassObject):
or PcdValueInImg > FfsInfStatement._MAX_SIZE_TYPE[Pcd.DatumType]:
EdkLogger.error("GenFds", GENFDS_ERROR, "The size of %s type PCD '%s.%s' doesn't match its data type." \
% (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
- Pcd.DefaultValue = DefaultValue
- self.PatchPcds.append(Pcd)
+ self.PatchPcds.append((Pcd, DefaultValue))
+
self.InfModule = Inf
self.PcdIsDriver = Inf.PcdIsDriver
self.IsBinaryModule = Inf.IsBinaryModule
@@ -308,7 +318,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
self.EfiOutputPath = self.__GetEFIOutPutPath__()
GenFdsGlobalVariable.VerboseLogger( "ModuelEFIPath: " + self.EfiOutputPath)
-## PatchEfiFile
+ ## PatchEfiFile
#
# Patch EFI file with patch PCD
#
@@ -316,18 +326,25 @@ class FfsInfStatement(FfsInfStatementClassObject):
# @retval: Full path of patched EFI file: self.OutputPath + EfiFile base name
# If passed in file does not end with efi, return as is
#
- def PatchEfiFile(self, EfiFile):
- if os.path.splitext(EfiFile)[1].lower() != '.efi':
- return EfiFile
+ def PatchEfiFile(self, EfiFile, FileType):
if not self.PatchPcds:
return EfiFile
+ if FileType != 'PE32' and self.ModuleType != "USER_DEFINED":
+ return EfiFile
+ if self.PatchedBinFile:
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ 'Only one binary file can be patched:\n'
+ ' a binary file has been patched: %s\n'
+ ' current file: %s' % (self.PatchedBinFile, EfiFile),
+ File=self.InfFileName)
Basename = os.path.basename(EfiFile)
Output = os.path.join(self.OutputPath, Basename)
CopyLongFilePath(EfiFile, Output)
- for Pcd in self.PatchPcds:
- RetVal, RetStr = PatchBinaryFile(Output, int(Pcd.Offset, 0), Pcd.DatumType, Pcd.DefaultValue, Pcd.MaxDatumSize)
+ for Pcd, Value in self.PatchPcds:
+ RetVal, RetStr = PatchBinaryFile(Output, int(Pcd.Offset, 0), Pcd.DatumType, Value, Pcd.MaxDatumSize)
if RetVal:
EdkLogger.error("GenFds", GENFDS_ERROR, RetStr, File=self.InfFileName)
+ self.PatchedBinFile = os.path.normpath(EfiFile)
return Output
## GenFfs() method
#
@@ -349,7 +366,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
# Allow binary type module not specify override rule in FDF file.
#
- if len(self.BinFileList) >0 and not self.InDsc:
+ if len(self.BinFileList) > 0:
if self.Rule == None or self.Rule == "":
self.Rule = "BINARY"
@@ -568,6 +585,8 @@ class FfsInfStatement(FfsInfStatementClassObject):
(ModulePath, FileName) = os.path.split(self.InfFileName)
Index = FileName.rfind('.')
FileName = FileName[0:Index]
+ if self.OverrideGuid:
+ FileName = self.OverrideGuid
Arch = "NoneArch"
if self.CurrentArch != None:
Arch = self.CurrentArch
diff --git a/BaseTools/Source/Python/GenFds/GenFds.py b/BaseTools/Source/Python/GenFds/GenFds.py
index c5ccda677b..c4e433d425 100644
--- a/BaseTools/Source/Python/GenFds/GenFds.py
+++ b/BaseTools/Source/Python/GenFds/GenFds.py
@@ -36,6 +36,7 @@ from Common import EdkLogger
from Common.String import *
from Common.Misc import DirCache,PathClass
from Common.Misc import SaveFileOnChange
+from Common.Misc import ClearDuplicatedInf
from Common.Misc import GuidStructureStringToGuidString
from Common.BuildVersion import gBUILD_VERSION
@@ -149,15 +150,36 @@ def main():
GenFdsGlobalVariable.ActivePlatform = PathClass(NormPath(ActivePlatform), Workspace)
- BuildConfigurationFile = os.path.normpath(os.path.join(GenFdsGlobalVariable.WorkSpaceDir, "Conf/target.txt"))
+ if (Options.ConfDirectory):
+ # Get alternate Conf location, if it is absolute, then just use the absolute directory name
+ ConfDirectoryPath = os.path.normpath(Options.ConfDirectory)
+ if ConfDirectoryPath.startswith('"'):
+ ConfDirectoryPath = ConfDirectoryPath[1:]
+ if ConfDirectoryPath.endswith('"'):
+ ConfDirectoryPath = ConfDirectoryPath[:-1]
+ if not os.path.isabs(ConfDirectoryPath):
+ # Since alternate directory name is not absolute, the alternate directory is located within the WORKSPACE
+ # This also handles the case where the Conf directory is inside the workspace, e.g. --conf=Conf
+ ConfDirectoryPath = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, ConfDirectoryPath)
+ else:
+ # Get standard WORKSPACE/Conf, use the absolute path to the WORKSPACE/Conf
+ ConfDirectoryPath = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, 'Conf')
+ GenFdsGlobalVariable.ConfDir = ConfDirectoryPath
+ BuildConfigurationFile = os.path.normpath(os.path.join(ConfDirectoryPath, "target.txt"))
if os.path.isfile(BuildConfigurationFile) == True:
TargetTxtClassObject.TargetTxtClassObject(BuildConfigurationFile)
else:
EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=BuildConfigurationFile)
+ #Set global flag for build mode
+ GlobalData.gIgnoreSource = Options.IgnoreSources
+
if Options.Macros:
for Pair in Options.Macros:
- Pair = Pair.strip('"')
+ if Pair.startswith('"'):
+ Pair = Pair[1:]
+ if Pair.endswith('"'):
+ Pair = Pair[:-1]
List = Pair.split('=')
if len(List) == 2:
if List[0].strip() == "EFI_SOURCE":
@@ -177,7 +199,8 @@ def main():
os.environ["WORKSPACE"] = Workspace
"""call Workspace build create database"""
- BuildWorkSpace = WorkspaceDatabase(None)
+ GlobalData.gDatabasePath = os.path.normpath(os.path.join(ConfDirectoryPath, GlobalData.gDatabasePath))
+ BuildWorkSpace = WorkspaceDatabase(GlobalData.gDatabasePath)
BuildWorkSpace.InitDatabase()
#
@@ -276,11 +299,13 @@ def main():
"\nPython",
CODE_ERROR,
"Tools code failure",
- ExtraData="Please send email to edk2-buildtools-devel@lists.sourceforge.net for help, attaching following call stack trace!\n",
+ ExtraData="Please send email to edk2-devel@lists.sourceforge.net for help, attaching following call stack trace!\n",
RaiseError=False
)
EdkLogger.quiet(traceback.format_exc())
ReturnCode = CODE_ERROR
+ finally:
+ ClearDuplicatedInf()
return ReturnCode
gParamCheck = []
@@ -321,6 +346,9 @@ def myOptionParser():
action="callback", callback=SingleCheckCallback)
Parser.add_option("-D", "--define", action="append", type="string", dest="Macros", help="Macro: \"Name [= Value]\".")
Parser.add_option("-s", "--specifyaddress", dest="FixedAddress", action="store_true", type=None, help="Specify driver load address.")
+ Parser.add_option("--conf", action="store", type="string", dest="ConfDirectory", help="Specify the customized Conf directory.")
+ Parser.add_option("--ignore-sources", action="store_true", dest="IgnoreSources", default=False, help="Focus to a binary build and ignore all source files")
+
(Options, args) = Parser.parse_args()
return Options
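
The new --conf handling resolves an alternate Conf directory against the WORKSPACE, and the -D parsing now removes at most one pair of surrounding quotes (unlike the old strip('"'), which would also eat quotes that belong to the value). A small illustrative sketch of the directory resolution; resolve_conf_dir is a hypothetical helper, not a GenFds API:

    import os

    def resolve_conf_dir(workspace_dir, conf_option=None):
        """Return the effective Conf directory for a GenFds/build invocation."""
        if conf_option:
            path = os.path.normpath(conf_option)
            # Remove a single pair of surrounding quotes, if present.
            if path.startswith('"'):
                path = path[1:]
            if path.endswith('"'):
                path = path[:-1]
            # A relative path is taken to be relative to the WORKSPACE.
            if not os.path.isabs(path):
                path = os.path.join(workspace_dir, path)
            return path
        # Default: WORKSPACE/Conf
        return os.path.join(workspace_dir, 'Conf')
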
diff --git a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
index f97b41b834..04bbc300ce 100644
--- a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
+++ b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
@@ -45,6 +45,7 @@ class GenFdsGlobalVariable:
LibDir = ''
WorkSpace = None
WorkSpaceDir = ''
+ ConfDir = ''
EdkSourceDir = ''
OutputDirFromDscDict = {}
TargetName = ''
@@ -88,7 +89,7 @@ class GenFdsGlobalVariable:
def __LoadBuildRule():
if GenFdsGlobalVariable.__BuildRuleDatabase:
return GenFdsGlobalVariable.__BuildRuleDatabase
- BuildConfigurationFile = os.path.normpath(os.path.join(GenFdsGlobalVariable.WorkSpaceDir, "Conf/target.txt"))
+ BuildConfigurationFile = os.path.normpath(os.path.join(GenFdsGlobalVariable.ConfDir, "target.txt"))
TargetTxt = TargetTxtClassObject()
if os.path.isfile(BuildConfigurationFile) == True:
TargetTxt.LoadTargetTxtFile(BuildConfigurationFile)
@@ -201,11 +202,13 @@ class GenFdsGlobalVariable:
TargetList = set()
FileList = []
- for File in Inf.Sources:
- if File.TagName in ("", "*", GenFdsGlobalVariable.ToolChainTag) and \
- File.ToolChainFamily in ("", "*", GenFdsGlobalVariable.ToolChainFamily):
- FileList.append((File, DataType.TAB_UNKNOWN_FILE))
-
+
+ if not Inf.IsBinaryModule:
+ for File in Inf.Sources:
+ if File.TagName in ("", "*", GenFdsGlobalVariable.ToolChainTag) and \
+ File.ToolChainFamily in ("", "*", GenFdsGlobalVariable.ToolChainFamily):
+ FileList.append((File, DataType.TAB_UNKNOWN_FILE))
+
for File in Inf.Binaries:
if File.Target in ['COMMON', '*', GenFdsGlobalVariable.TargetName]:
FileList.append((File, File.Type))
@@ -645,7 +648,8 @@ class GenFdsGlobalVariable:
'$(EDK_SOURCE)' : GenFdsGlobalVariable.EdkSourceDir,
# '$(OUTPUT_DIRECTORY)': GenFdsGlobalVariable.OutputDirFromDsc,
'$(TARGET)' : GenFdsGlobalVariable.TargetName,
- '$(TOOL_CHAIN_TAG)' : GenFdsGlobalVariable.ToolChainTag
+ '$(TOOL_CHAIN_TAG)' : GenFdsGlobalVariable.ToolChainTag,
+ '$(SPACE)' : ' '
}
OutputDir = GenFdsGlobalVariable.OutputDirFromDscDict[GenFdsGlobalVariable.ArchList[0]]
if Arch != 'COMMON' and Arch in GenFdsGlobalVariable.ArchList:
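
GetFileList now skips [Sources] entries entirely for binary modules (the --ignore-sources case) and keeps only [Binaries] entries that match the current target. An illustrative sketch of that selection rule; the module and file objects are simplified stand-ins for the BaseTools build objects:

    def collect_module_files(module, tool_chain_tag, tool_chain_family, target_name):
        """Return (file, file_type) pairs that should feed FFS generation."""
        file_list = []
        if not module.IsBinaryModule:
            # Source files only count when the module is built from source.
            for source in module.Sources:
                if source.TagName in ("", "*", tool_chain_tag) and \
                   source.ToolChainFamily in ("", "*", tool_chain_family):
                    file_list.append((source, 'UNKNOWN'))
        for binary in module.Binaries:
            if binary.Target in ('COMMON', '*', target_name):
                file_list.append((binary, binary.Type))
        return file_list
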
diff --git a/BaseTools/Source/Python/GenFds/GuidSection.py b/BaseTools/Source/Python/GenFds/GuidSection.py
index 97d8ba8f76..0e5bb3b270 100644
--- a/BaseTools/Source/Python/GenFds/GuidSection.py
+++ b/BaseTools/Source/Python/GenFds/GuidSection.py
@@ -198,7 +198,7 @@ class GuidSection(GuidSectionClassObject) :
HeaderLength = str(self.ExtraHeaderSize)
if self.ProcessRequired == "NONE" and HeaderLength == None:
- if TempFileSize > InputFileSize and TempFileSize % 4 == 0:
+ if TempFileSize > InputFileSize:
FileHandleIn.seek(0)
BufferIn = FileHandleIn.read()
FileHandleOut.seek(0)
@@ -247,15 +247,15 @@ class GuidSection(GuidSectionClassObject) :
if self.KeyStringList == None or self.KeyStringList == []:
Target = GenFdsGlobalVariable.TargetName
ToolChain = GenFdsGlobalVariable.ToolChainTag
- ToolDb = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.WorkSpaceDir).ToolsDefTxtDatabase
+ ToolDb = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.ConfDir).ToolsDefTxtDatabase
if ToolChain not in ToolDb['TOOL_CHAIN_TAG']:
EdkLogger.error("GenFds", GENFDS_ERROR, "Can not find external tool because tool tag %s is not defined in tools_def.txt!" % ToolChain)
self.KeyStringList = [Target+'_'+ToolChain+'_'+self.CurrentArchList[0]]
for Arch in self.CurrentArchList:
- if Target+'_'+ToolChain+'_'+Arch not in self.KeyStringList:
- self.KeyStringList.append(Target+'_'+ToolChain+'_'+Arch)
-
- ToolDefinition = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.WorkSpaceDir).ToolsDefTxtDictionary
+ if Target + '_' + ToolChain + '_' + Arch not in self.KeyStringList:
+ self.KeyStringList.append(Target + '_' + ToolChain + '_' + Arch)
+
+ ToolDefinition = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.ConfDir).ToolsDefTxtDictionary
ToolPathTmp = None
for ToolDef in ToolDefinition.items():
if self.NameGuid == ToolDef[1]:
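
GuidSection now reads tools_def.txt from the resolved Conf directory and derives its tool lookup keys from the target, tool chain tag and architecture list. A small sketch of that key construction, assuming plain strings throughout:

    def build_key_strings(target, tool_chain, arch_list):
        """Return the TARGET_TOOLCHAIN_ARCH keys used to find the external tool."""
        keys = [target + '_' + tool_chain + '_' + arch_list[0]]
        for arch in arch_list:
            key = target + '_' + tool_chain + '_' + arch
            if key not in keys:
                keys.append(key)
        return keys

    # Example: build_key_strings('DEBUG', 'VS2008', ['IA32', 'X64'])
    # returns ['DEBUG_VS2008_IA32', 'DEBUG_VS2008_X64'].
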
diff --git a/BaseTools/Source/Python/GenFds/Section.py b/BaseTools/Source/Python/GenFds/Section.py
index e32041d6a5..c67177bb17 100644
--- a/BaseTools/Source/Python/GenFds/Section.py
+++ b/BaseTools/Source/Python/GenFds/Section.py
@@ -129,9 +129,11 @@ class Section (SectionClassObject):
if FileType != None:
for File in FfsInf.BinFileList:
if File.Arch == "COMMON" or FfsInf.CurrentArch == File.Arch:
- if File.Type == FileType or (int(FfsInf.PiSpecVersion, 16) >= 0x0001000A and FileType == 'DXE_DPEX'and File.Type == 'SMM_DEPEX'):
+ if File.Type == FileType or (int(FfsInf.PiSpecVersion, 16) >= 0x0001000A \
+ and FileType == 'DXE_DPEX' and File.Type == 'SMM_DEPEX') \
+ or (FileType == 'TE' and File.Type == 'PE32'):
if '*' in FfsInf.TargetOverrideList or File.Target == '*' or File.Target in FfsInf.TargetOverrideList or FfsInf.TargetOverrideList == []:
- FileList.append(FfsInf.PatchEfiFile(File.Path))
+ FileList.append(FfsInf.PatchEfiFile(File.Path, File.Type))
else:
GenFdsGlobalVariable.InfLogger ("\nBuild Target \'%s\' of File %s is not in the Scope of %s specified by INF %s in FDF" %(File.Target, File.File, FfsInf.TargetOverrideList, FfsInf.InfFileName))
else:
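
This Section.py hunk widens the binary match so a PE32 file can satisfy a TE section request, alongside the existing SMM_DEPEX-for-DXE-depex case. The rule restated as a standalone predicate (the 'DXE_DPEX' literal is kept exactly as spelled in the source):

    def binary_matches_section(requested_type, file_type, pi_spec_version):
        """Decide whether a binary of file_type can be used for requested_type."""
        if file_type == requested_type:
            return True
        # An SMM depex may stand in for a DXE depex on PI spec >= 1.10.
        if requested_type == 'DXE_DPEX' and file_type == 'SMM_DEPEX' \
                and int(pi_spec_version, 16) >= 0x0001000A:
            return True
        # A PE32 image may be placed into a TE section.
        if requested_type == 'TE' and file_type == 'PE32':
            return True
        return False
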
diff --git a/BaseTools/Source/Python/TargetTool/TargetTool.py b/BaseTools/Source/Python/TargetTool/TargetTool.py
index 7a366db5fb..bfdf763a7a 100644
--- a/BaseTools/Source/Python/TargetTool/TargetTool.py
+++ b/BaseTools/Source/Python/TargetTool/TargetTool.py
@@ -1,3 +1,5 @@
+## @file
+# Target Tool Parser
#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
#
diff --git a/BaseTools/Source/Python/Trim/Trim.py b/BaseTools/Source/Python/Trim/Trim.py
index 13485c72da..7df83649d0 100644
--- a/BaseTools/Source/Python/Trim/Trim.py
+++ b/BaseTools/Source/Python/Trim/Trim.py
@@ -594,7 +594,7 @@ def Main():
"\nTrim",
CODE_ERROR,
"Unknown fatal error when trimming [%s]" % InputFile,
- ExtraData="\n(Please send email to edk2-buildtools-devel@lists.sourceforge.net for help, attaching following call stack trace!)\n",
+ ExtraData="\n(Please send email to edk2-devel@lists.sourceforge.net for help, attaching following call stack trace!)\n",
RaiseError=False
)
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
diff --git a/BaseTools/Source/Python/UPT/Logger/StringTable.py b/BaseTools/Source/Python/UPT/Logger/StringTable.py
index 5b6973c944..54993f26fe 100644
--- a/BaseTools/Source/Python/UPT/Logger/StringTable.py
+++ b/BaseTools/Source/Python/UPT/Logger/StringTable.py
@@ -320,7 +320,7 @@ MSG_NEW_FILE_NAME_FOR_DIST = _(
MSG_UPDATE_PACKAGE_DATABASE = _("Update Distribution Package Database ...")
MSG_PYTHON_ON = _("(Python %s on %s) ")
MSG_SEARCH_FOR_HELP = _(
- "\n(Please send email to edk2-buildtools-devel@lists.sourceforge.net for\n"
+ "\n(Please send email to edk2-devel@lists.sourceforge.net for\n"
" help, attach the following call stack trace.)\n")
MSG_REMOVE_TEMP_FILE_STARTED = _("Removing temp files started ... ")
MSG_REMOVE_TEMP_FILE_DONE = _("Removing temp files ... Done.")
diff --git a/BaseTools/Source/Python/Workspace/MetaFileCommentParser.py b/BaseTools/Source/Python/Workspace/MetaFileCommentParser.py
index ee6f5ac2b8..df1e90faf5 100644
--- a/BaseTools/Source/Python/Workspace/MetaFileCommentParser.py
+++ b/BaseTools/Source/Python/Workspace/MetaFileCommentParser.py
@@ -25,4 +25,27 @@ import Common.EdkLogger as EdkLogger
UsageList = ("PRODUCES", "PRODUCED", "ALWAYS_PRODUCES", "ALWAYS_PRODUCED", "SOMETIMES_PRODUCES",
"SOMETIMES_PRODUCED", "CONSUMES", "CONSUMED", "ALWAYS_CONSUMES", "ALWAYS_CONSUMED",
"SOMETIMES_CONSUMES", "SOMETIMES_CONSUMED", "SOMETIME_CONSUMES")
+ErrorMsgMap = {
+ MODEL_EFI_GUID : "The usage for this GUID is not listed in this INF: %s[%d]:%s",
+ MODEL_EFI_PPI : "The usage for this PPI is not listed in this INF: %s[%d]:%s.",
+ MODEL_EFI_PROTOCOL : "The usage for this Protocol is not listed in this INF: %s[%d]:%s.",
+ MODEL_PCD_DYNAMIC : "The usage for this PCD is not listed in this INF: %s[%d]:%s."
+}
+def CheckInfComment(SectionType, Comments, InfFile, LineNo, ValueList):
+ if SectionType in [MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC_EX, MODEL_PCD_DYNAMIC]:
+ CheckUsage(Comments, UsageList, InfFile, LineNo, ValueList[0]+'.'+ValueList[1], ErrorMsgMap[MODEL_PCD_DYNAMIC])
+ elif SectionType in [MODEL_EFI_GUID, MODEL_EFI_PPI]:
+ CheckUsage(Comments, UsageList, InfFile, LineNo, ValueList[0], ErrorMsgMap[SectionType])
+ elif SectionType == MODEL_EFI_PROTOCOL:
+ CheckUsage(Comments, UsageList + ("TO_START", "BY_START"), InfFile, LineNo, ValueList[0], ErrorMsgMap[SectionType])
+
+def CheckUsage(Comments, Usages, InfFile, LineNo, Value, ErrorMsg):
+ for Comment in Comments:
+ for Word in Comment[0].replace('#', ' ').split():
+ if Word in Usages:
+ return
+ EdkLogger.error(
+ "Parser", FORMAT_INVALID,
+ ErrorMsg % (InfFile, LineNo, Value)
+ )
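
CheckInfComment()/CheckUsage() enforce that each GUID, PPI, Protocol and dynamic PCD entry in an INF carries a usage keyword in its ## comment when build runs with --check-usage. A simplified, self-contained version of the scan, returning a bool instead of raising through EdkLogger and with a trimmed keyword list:

    USAGES = ("PRODUCES", "CONSUMES", "SOMETIMES_PRODUCES", "SOMETIMES_CONSUMES")

    def has_usage_keyword(comments, usages=USAGES):
        """comments is a list of (comment_text, line_number) tuples, as in the parser."""
        for text, _line in comments:
            for word in text.replace('#', ' ').split():
                if word in usages:
                    return True
        return False

    # A '## CONSUMES' comment satisfies the check; a bare '## some note' does not.
    assert has_usage_keyword([('## CONSUMES', 10)])
    assert not has_usage_keyword([('## some note', 11)])
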
diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py
index 98939093d3..53b44f4403 100644
--- a/BaseTools/Source/Python/Workspace/MetaFileParser.py
+++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py
@@ -31,6 +31,7 @@ from CommonDataClass.Exceptions import *
from Common.LongFilePathSupport import OpenLongFilePath as open
from MetaFileTable import MetaFileStorage
+from MetaFileCommentParser import CheckInfComment
## A decorator used to parse macro definition
def ParseMacro(Parser):
@@ -595,6 +596,8 @@ class InfParser(MetaFileParser):
continue
if Comment:
Comments.append((Comment, Index + 1))
+ if GlobalData.gOptions and GlobalData.gOptions.CheckUsage:
+ CheckInfComment(self._SectionType, Comments, str(self.MetaFile), Index + 1, self._ValueList)
#
# Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1,
# LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
@@ -823,6 +826,10 @@ class DscParser(MetaFileParser):
"FIX_LOAD_TOP_MEMORY_ADDRESS"
]
+ SubSectionDefineKeywords = [
+ "FILE_GUID"
+ ]
+
SymbolPattern = ValueExpression.SymbolPattern
## Constructor of DscParser
@@ -1041,13 +1048,15 @@ class DscParser(MetaFileParser):
if not self._ValueList[2]:
EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- if not self._ValueList[1] in self.DefineKeywords:
+ if (not self._ValueList[1] in self.DefineKeywords and
+ (self._InSubsection and self._ValueList[1] not in self.SubSectionDefineKeywords)):
EdkLogger.error('Parser', FORMAT_INVALID,
"Unknown keyword found: %s. "
"If this is a macro you must "
"add it as a DEFINE in the DSC" % self._ValueList[1],
ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- self._Defines[self._ValueList[1]] = self._ValueList[2]
+ if not self._InSubsection:
+ self._Defines[self._ValueList[1]] = self._ValueList[2]
self._ItemType = self.DataType[TAB_DSC_DEFINES.upper()]
@ParseMacro
@@ -1226,6 +1235,7 @@ class DscParser(MetaFileParser):
self.__RetrievePcdValue()
self._Content = self._RawTable.GetAll()
self._ContentIndex = 0
+ self._InSubsection = False
while self._ContentIndex < len(self._Content) :
Id, self._ItemType, V1, V2, V3, S1, S2, Owner, self._From, \
LineStart, ColStart, LineEnd, ColEnd, Enabled = self._Content[self._ContentIndex]
@@ -1254,6 +1264,10 @@ class DscParser(MetaFileParser):
self._LineIndex = LineStart - 1
self._ValueList = [V1, V2, V3]
+ if Owner > 0 and Owner in self._IdMapping:
+ self._InSubsection = True
+ else:
+ self._InSubsection = False
try:
Processer[self._ItemType]()
except EvaluationException, Excpt:
@@ -1356,6 +1370,13 @@ class DscParser(MetaFileParser):
Type, Name, Value = self._ValueList
Value = ReplaceMacro(Value, self._Macros, False)
+ #
+ # If it is <Defines>, return
+ #
+ if self._InSubsection:
+ self._ValueList = [Type, Name, Value]
+ return
+
if self._ItemType == MODEL_META_DATA_DEFINE:
if self._SectionType == MODEL_META_DATA_HEADER:
self._FileLocalMacros[Name] = Value
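
The DscParser changes add SubSectionDefineKeywords and an _InSubsection flag so a [Components] entry can carry a <Defines> sub-section, currently limited to FILE_GUID. A simplified restatement of the intended keyword check (the hunk above folds both lists and the flag into a single condition):

    # Keyword lists trimmed to illustrative subsets.
    DEFINE_KEYWORDS = ['PLATFORM_NAME', 'PLATFORM_GUID', 'OUTPUT_DIRECTORY']
    SUBSECTION_DEFINE_KEYWORDS = ['FILE_GUID']

    def is_valid_define(keyword, in_subsection):
        """Only FILE_GUID is accepted inside a component <Defines> sub-section."""
        if in_subsection:
            return keyword in SUBSECTION_DEFINE_KEYWORDS
        return keyword in DEFINE_KEYWORDS
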
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
index 638df1f6ed..83f730e629 100644
--- a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
+++ b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
@@ -35,9 +35,12 @@ from MetaFileParser import *
from BuildClassObject import *
from WorkspaceCommon import GetDeclaredPcd
from Common.Misc import AnalyzeDscPcd
+from Common.Misc import ProcessDuplicatedInf
import re
from Common.Parsing import IsValidWord
+import Common.GlobalData as GlobalData
+
## Platform build information from DSC file
#
# This class is used to retrieve information stored in database and convert them
@@ -103,6 +106,7 @@ class DscBuildData(PlatformBuildClassObject):
self._Target = Target
self._Toolchain = Toolchain
self._Clear()
+ self._HandleOverridePath()
## XXX[key] = value
def __setitem__(self, key, value):
@@ -147,6 +151,27 @@ class DscBuildData(PlatformBuildClassObject):
self._VpdToolGuid = None
self.__Macros = None
+
+ ## handle Override Path of Module
+ def _HandleOverridePath(self):
+ RecordList = self._RawData[MODEL_META_DATA_COMPONENT, self._Arch]
+ Macros = self._Macros
+ Macros["EDK_SOURCE"] = GlobalData.gEcpSource
+ for Record in RecordList:
+ ModuleId = Record[5]
+ LineNo = Record[6]
+ ModuleFile = PathClass(NormPath(Record[0]), GlobalData.gWorkspace, Arch=self._Arch)
+ RecordList = self._RawData[MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH, self._Arch, None, ModuleId]
+ if RecordList != []:
+ SourceOverridePath = os.path.join(GlobalData.gWorkspace, NormPath(RecordList[0][0]))
+
+ # Check if the source override path exists
+ if not os.path.isdir(SourceOverridePath):
+ EdkLogger.error('build', FILE_NOT_FOUND, Message='Source override path does not exist:', File=self.MetaFile, ExtraData=SourceOverridePath, Line=LineNo)
+
+ #Add to GlobalData Variables
+ GlobalData.gOverrideDir[ModuleFile.Key] = SourceOverridePath
+
## Get current effective macros
def _GetMacros(self):
if self.__Macros == None:
@@ -478,6 +503,7 @@ class DscBuildData(PlatformBuildClassObject):
Macros = self._Macros
Macros["EDK_SOURCE"] = GlobalData.gEcpSource
for Record in RecordList:
+ DuplicatedFile = False
ModuleFile = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
ModuleId = Record[5]
LineNo = Record[6]
@@ -490,23 +516,11 @@ class DscBuildData(PlatformBuildClassObject):
# Check duplication
# If arch is COMMON, no duplicate module is checked since all modules in all component sections are selected
if self._Arch != 'COMMON' and ModuleFile in self._Modules:
- EdkLogger.error('build', FILE_DUPLICATED, File=self.MetaFile, ExtraData=str(ModuleFile), Line=LineNo)
+ DuplicatedFile = True
Module = ModuleBuildClassObject()
Module.MetaFile = ModuleFile
- # get module override path
- RecordList = self._RawData[MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH, self._Arch, None, ModuleId]
- if RecordList != []:
- Module.SourceOverridePath = os.path.join(GlobalData.gWorkspace, NormPath(RecordList[0][0], Macros))
-
- # Check if the source override path exists
- if not os.path.isdir(Module.SourceOverridePath):
- EdkLogger.error('build', FILE_NOT_FOUND, Message = 'Source override path does not exist:', File=self.MetaFile, ExtraData=Module.SourceOverridePath, Line=LineNo)
-
- #Add to GlobalData Variables
- GlobalData.gOverrideDir[ModuleFile.Key] = Module.SourceOverridePath
-
# get module private library instance
RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, None, ModuleId]
for Record in RecordList:
@@ -563,6 +577,16 @@ class DscBuildData(PlatformBuildClassObject):
OptionString = Module.BuildOptions[ToolChainFamily, ToolChain]
Module.BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Option
+ RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, None, ModuleId]
+ if DuplicatedFile and not RecordList:
+ EdkLogger.error('build', FILE_DUPLICATED, File=self.MetaFile, ExtraData=str(ModuleFile), Line=LineNo)
+ if RecordList:
+ if len(RecordList) != 1:
+ EdkLogger.error('build', OPTION_UNKNOWN, 'Only FILE_GUID can be listed in <Defines> section.',
+ File=self.MetaFile, ExtraData=str(ModuleFile), Line=LineNo)
+ ModuleFile = ProcessDuplicatedInf(ModuleFile, RecordList[0][2], GlobalData.gWorkspace)
+ ModuleFile.Arch = self._Arch
+
self._Modules[ModuleFile] = Module
return self._Modules
@@ -641,9 +665,26 @@ class DscBuildData(PlatformBuildClassObject):
def _ValidatePcd(self, PcdCName, TokenSpaceGuid, Setting, PcdType, LineNo):
if self._DecPcds == None:
self._DecPcds = GetDeclaredPcd(self, self._Bdb, self._Arch, self._Target, self._Toolchain)
+ FdfInfList = []
+ if GlobalData.gFdfParser:
+ FdfInfList = GlobalData.gFdfParser.Profile.InfList
+
+ PkgSet = set()
+ for Inf in FdfInfList:
+ ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch=self._Arch)
+ if ModuleFile in self._Modules:
+ continue
+ ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
+ PkgSet.update(ModuleData.Packages)
+ DecPcds = {}
+ for Pkg in PkgSet:
+ for Pcd in Pkg.Pcds:
+ DecPcds[Pcd[0], Pcd[1]] = Pkg.Pcds[Pcd]
+ self._DecPcds.update(DecPcds)
+
if (PcdCName, TokenSpaceGuid) not in self._DecPcds:
EdkLogger.error('build', PARSER_ERROR,
- "Pcd (%s.%s) defined in DSC is not declared in DEC files." % (TokenSpaceGuid, PcdCName),
+ "Pcd (%s.%s) defined in DSC is not declared in DEC files. Arch: ['%s']" % (TokenSpaceGuid, PcdCName, self._Arch),
File=self.MetaFile, Line=LineNo)
ValueList, IsValid, Index = AnalyzeDscPcd(Setting, PcdType, self._DecPcds[PcdCName, TokenSpaceGuid].DatumType)
if not IsValid and PcdType not in [MODEL_PCD_FEATURE_FLAG, MODEL_PCD_FIXED_AT_BUILD]:
@@ -1676,6 +1717,9 @@ class InfBuildData(ModuleBuildClassObject):
# items defined _PROPERTY_ don't need additional processing
if Name in self:
self[Name] = Value
+ if self._Defs == None:
+ self._Defs = sdict()
+ self._Defs[Name] = Value
# some special items in [Defines] section need special treatment
elif Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION', 'EDK_RELEASE_VERSION', 'PI_SPECIFICATION_VERSION'):
if Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION'):
@@ -2309,6 +2353,13 @@ class InfBuildData(ModuleBuildClassObject):
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No [Depex] section or no valid expression in [Depex] section for [%s] module" \
% self.ModuleType, File=self.MetaFile)
+ if len(RecordList) != 0 and self.ModuleType == 'USER_DEFINED':
+ for Record in RecordList:
+ if Record[4] not in ['PEIM', 'DXE_DRIVER', 'DXE_SMM_DRIVER']:
+ EdkLogger.error('build', FORMAT_INVALID,
+ "'%s' module must specify the type of [Depex] section" % self.ModuleType,
+ File=self.MetaFile)
+
Depex = sdict()
for Record in RecordList:
DepexStr = ReplaceMacro(Record[0], self._Macros, False)
@@ -2570,8 +2621,6 @@ class InfBuildData(ModuleBuildClassObject):
#
class WorkspaceDatabase(object):
- # default database file path
- _DB_PATH_ = "Conf/.cache/build.db"
#
# internal class used for call corresponding file parser and caching the result
@@ -2682,7 +2731,7 @@ class WorkspaceDatabase(object):
def __init__(self, DbPath, RenewDb=False):
self._DbClosedFlag = False
if not DbPath:
- DbPath = os.path.normpath(os.path.join(GlobalData.gWorkspace, self._DB_PATH_))
+ DbPath = os.path.normpath(os.path.join(GlobalData.gWorkspace, 'Conf', GlobalData.gDatabasePath))
# don't create necessary path for db in memory
if DbPath != ':memory:':
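
Among the WorkspaceDatabase.py changes, _ValidatePcd() now also accepts PCDs declared only by packages of INFs listed in the FDF, so DSC settings for such PCDs are no longer rejected. A self-contained sketch of that gathering step, with simplified stand-ins for the module and package objects:

    def gather_declared_pcds(dec_pcds, fdf_modules, dsc_modules):
        """dec_pcds maps a PCD key (e.g. (TokenCName, TokenSpaceGuidCName)) to a PCD object."""
        pkg_set = set()
        for module in fdf_modules:
            if module in dsc_modules:
                continue                       # already covered by the DSC
            pkg_set.update(module.Packages)
        for pkg in pkg_set:
            for key, pcd in pkg.Pcds.items():
                dec_pcds.setdefault(key, pcd)  # keep entries that already exist
        return dec_pcds
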
diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py
index 2f8bfb42b1..a5bb70d456 100644
--- a/BaseTools/Source/Python/build/build.py
+++ b/BaseTools/Source/Python/build/build.py
@@ -57,9 +57,8 @@ __copyright__ = "Copyright (c) 2007 - 2014, Intel Corporation All rights reserv
gSupportedTarget = ['all', 'genc', 'genmake', 'modules', 'libraries', 'fds', 'clean', 'cleanall', 'cleanlib', 'run']
## build configuration file
-gBuildConfiguration = "Conf/target.txt"
-gBuildCacheDir = "Conf/.cache"
-gToolsDefinition = "Conf/tools_def.txt"
+gBuildConfiguration = "target.txt"
+gToolsDefinition = "tools_def.txt"
TemporaryTablePattern = re.compile(r'^_\d+_\d+_[a-fA-F0-9]+$')
TmpTableDict = {}
@@ -630,7 +629,8 @@ class BuildTask:
#
def AddDependency(self, Dependency):
for Dep in Dependency:
- self.DependencyList.append(BuildTask.New(Dep)) # BuildTask list
+ if not Dep.BuildObject.IsBinaryModule:
+ self.DependencyList.append(BuildTask.New(Dep)) # BuildTask list
## The thread wrapper of LaunchCommand function
#
@@ -732,18 +732,34 @@ class Build():
self.SkipAutoGen = BuildOptions.SkipAutoGen
self.Reparse = BuildOptions.Reparse
self.SkuId = BuildOptions.SkuId
+ self.ConfDirectory = BuildOptions.ConfDirectory
self.SpawnMode = True
self.BuildReport = BuildReport(BuildOptions.ReportFile, BuildOptions.ReportType)
self.TargetTxt = TargetTxtClassObject()
self.ToolDef = ToolDefClassObject()
#Set global flag for build mode
GlobalData.gIgnoreSource = BuildOptions.IgnoreSources
+
+ if self.ConfDirectory:
+ # Get alternate Conf location, if it is absolute, then just use the absolute directory name
+ ConfDirectoryPath = os.path.normpath(self.ConfDirectory)
+
+ if not os.path.isabs(ConfDirectoryPath):
+ # Since alternate directory name is not absolute, the alternate directory is located within the WORKSPACE
+ # This also handles the case where the Conf directory is inside the workspace, e.g. --conf=Conf
+ ConfDirectoryPath = os.path.join(self.WorkspaceDir, ConfDirectoryPath)
+ else:
+ # Get standard WORKSPACE/Conf, use the absolute path to the WORKSPACE/Conf
+ ConfDirectoryPath = os.path.join(self.WorkspaceDir, 'Conf')
+ GlobalData.gConfDirectory = ConfDirectoryPath
+ GlobalData.gDatabasePath = os.path.normpath(os.path.join(ConfDirectoryPath, GlobalData.gDatabasePath))
+
if BuildOptions.DisableCache:
self.Db = WorkspaceDatabase(":memory:")
else:
- self.Db = WorkspaceDatabase(None, self.Reparse)
- self.BuildDatabase = self.Db.BuildObject
- self.Platform = None
+ self.Db = WorkspaceDatabase(GlobalData.gDatabasePath, self.Reparse)
+ self.BuildDatabase = self.Db.BuildObject
+ self.Platform = None
self.LoadFixAddress = 0
self.UniFlag = BuildOptions.Flag
self.BuildModules = []
@@ -772,14 +788,14 @@ class Build():
#
# Check target.txt and tools_def.txt and Init them
#
- BuildConfigurationFile = os.path.normpath(os.path.join(self.WorkspaceDir, gBuildConfiguration))
+ BuildConfigurationFile = os.path.normpath(os.path.join(GlobalData.gConfDirectory, gBuildConfiguration))
if os.path.isfile(BuildConfigurationFile) == True:
StatusCode = self.TargetTxt.LoadTargetTxtFile(BuildConfigurationFile)
ToolDefinitionFile = self.TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF]
if ToolDefinitionFile == '':
ToolDefinitionFile = gToolsDefinition
- ToolDefinitionFile = os.path.normpath(os.path.join(self.WorkspaceDir, ToolDefinitionFile))
+ ToolDefinitionFile = os.path.normpath(os.path.join(self.WorkspaceDir, 'Conf', ToolDefinitionFile))
if os.path.isfile(ToolDefinitionFile) == True:
StatusCode = self.ToolDef.LoadToolDefFile(ToolDefinitionFile)
else:
@@ -1079,7 +1095,7 @@ class Build():
# First should close DB.
#
self.Db.Close()
- RemoveDirectory(gBuildCacheDir, True)
+ RemoveDirectory(os.path.dirname(GlobalData.gDatabasePath), True)
except WindowsError, X:
EdkLogger.error("build", FILE_DELETE_FAILURE, ExtraData=str(X))
return True
@@ -1804,19 +1820,19 @@ class Build():
EdkLogger.SetLevel(OldLogLevel)
def DumpBuildData(self):
- CacheDirectory = os.path.join(self.WorkspaceDir, gBuildCacheDir)
+ CacheDirectory = os.path.dirname(GlobalData.gDatabasePath)
Utils.CreateDirectory(CacheDirectory)
Utils.DataDump(Utils.gFileTimeStampCache, os.path.join(CacheDirectory, "gFileTimeStampCache"))
Utils.DataDump(Utils.gDependencyDatabase, os.path.join(CacheDirectory, "gDependencyDatabase"))
def RestoreBuildData(self):
- FilePath = os.path.join(self.WorkspaceDir, gBuildCacheDir, "gFileTimeStampCache")
+ FilePath = os.path.join(os.path.dirname(GlobalData.gDatabasePath), "gFileTimeStampCache")
if Utils.gFileTimeStampCache == {} and os.path.isfile(FilePath):
Utils.gFileTimeStampCache = Utils.DataRestore(FilePath)
if Utils.gFileTimeStampCache == None:
Utils.gFileTimeStampCache = {}
- FilePath = os.path.join(self.WorkspaceDir, gBuildCacheDir, "gDependencyDatabase")
+ FilePath = os.path.join(os.path.dirname(GlobalData.gDatabasePath), "gDependencyDatabase")
if Utils.gDependencyDatabase == {} and os.path.isfile(FilePath):
Utils.gDependencyDatabase = Utils.DataRestore(FilePath)
if Utils.gDependencyDatabase == None:
@@ -1905,6 +1921,8 @@ def MyOptionParser():
"This option can also be specified by setting *_*_*_BUILD_FLAGS in [BuildOptions] section of platform DSC. If they are both specified, this value "\
"will override the setting in [BuildOptions] section of platform DSC.")
Parser.add_option("-N", "--no-cache", action="store_true", dest="DisableCache", default=False, help="Disable build cache mechanism")
+ Parser.add_option("--conf", action="store", type="string", dest="ConfDirectory", help="Specify the customized Conf directory.")
+ Parser.add_option("--check-usage", action="store_true", dest="CheckUsage", default=False, help="Check usage content of entries listed in INF file.")
Parser.add_option("--ignore-sources", action="store_true", dest="IgnoreSources", default=False, help="Focus to a binary build and ignore all source files")
(Opt, Args)=Parser.parse_args()
@@ -2064,13 +2082,14 @@ def Main():
"\nbuild",
CODE_ERROR,
"Unknown fatal error when processing [%s]" % MetaFile,
- ExtraData="\n(Please send email to edk2-buildtools-devel@lists.sourceforge.net for help, attaching following call stack trace!)\n",
+ ExtraData="\n(Please send email to edk2-devel@lists.sourceforge.net for help, attaching following call stack trace!)\n",
RaiseError=False
)
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
ReturnCode = CODE_ERROR
finally:
Utils.Progressor.Abort()
+ Utils.ClearDuplicatedInf()
if ReturnCode == 0:
Conclusion = "Done"
diff --git a/BaseTools/Source/Python/sitecustomize.py b/BaseTools/Source/Python/sitecustomize.py
index ec463ea5fa..4ea84c5129 100644
--- a/BaseTools/Source/Python/sitecustomize.py
+++ b/BaseTools/Source/Python/sitecustomize.py
@@ -1,5 +1,7 @@
+## @file
#
-# Copyright (c) 2009 - 2010, Apple Inc. All rights reserved.<BR>
+#
+# Copyright (c) 2009 - 2014, Apple Inc. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License