Diffstat (limited to 'BaseTools/Source/Python/build')
-rwxr-xr-x | BaseTools/Source/Python/build/build.py | 276 |
1 file changed, 120 insertions, 156 deletions
diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py
index 6fcfc7ec7a..8a8e32e496 100755
--- a/BaseTools/Source/Python/build/build.py
+++ b/BaseTools/Source/Python/build/build.py
@@ -620,7 +620,7 @@ class BuildTask:
#
def AddDependency(self, Dependency):
for Dep in Dependency:
- if not Dep.BuildObject.IsBinaryModule and not Dep.BuildObject.CanSkipbyCache(GlobalData.gCacheIR):
+ if not Dep.BuildObject.IsBinaryModule and not Dep.BuildObject.CanSkipbyCache(GlobalData.gModuleCacheHit):
self.DependencyList.append(BuildTask.New(Dep)) # BuildTask list
## The thread wrapper of LaunchCommand function
@@ -633,10 +633,13 @@ class BuildTask:
self.BuildItem.BuildObject.BuildTime = LaunchCommand(Command, WorkingDir,self.BuildItem.BuildObject)
self.CompleteFlag = True
- # Run hash operation post dependency, to account for libs
- if GlobalData.gUseHashCache and self.BuildItem.BuildObject.IsLibrary:
- HashFile = path.join(self.BuildItem.BuildObject.BuildDir, self.BuildItem.BuildObject.Name + ".hash")
- SaveFileOnChange(HashFile, self.BuildItem.BuildObject.GenModuleHash(), True)
+ # Run hash operation post dependency to account for libs
+ # Run if --hash or --binary-destination
+ if GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
+ self.BuildItem.BuildObject.GenModuleHash()
+ if GlobalData.gBinCacheDest:
+ self.BuildItem.BuildObject.GenCMakeHash()
+
except:
#
# TRICK: hide the output of threads left running, so that the user can
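Note on the hunk above: hashing now runs after the module's dependencies have completed, not only for libraries at library-build time, because (as the new comment says) the hash has to account for the libraries a module links, and their hashes only exist once they have built. GenModuleHash and GenCMakeHash themselves are not part of this diff; the snippet below is a hypothetical illustration of that ordering constraint, not BaseTools code.

    import hashlib

    def module_hash(source_digests, library_digests):
        # Hypothetical sketch: the module digest folds in the digests of the
        # libraries it links, which is why hashing must wait until the
        # dependency (library) builds have finished.
        h = hashlib.sha256()
        for digest in sorted(source_digests) + sorted(library_digests):
            h.update(digest.encode('utf-8'))
        return h.hexdigest()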
@@ -653,14 +656,6 @@ class BuildTask:
BuildTask._ErrorMessage = "%s broken\n %s [%s]" % \
(threading.currentThread().getName(), Command, WorkingDir)
- # Set the value used by hash invalidation flow in GlobalData.gModuleBuildTracking to 'SUCCESS'
- # If Module or Lib is being tracked, it did not fail header check test, and built successfully
- if (self.BuildItem.BuildObject in GlobalData.gModuleBuildTracking and
- GlobalData.gModuleBuildTracking[self.BuildItem.BuildObject] != 'FAIL_METAFILE' and
- not BuildTask._ErrorFlag.isSet()
- ):
- GlobalData.gModuleBuildTracking[self.BuildItem.BuildObject] = 'SUCCESS'
-
# indicate there's a thread is available for another build task
BuildTask._RunningQueueLock.acquire()
BuildTask._RunningQueue.pop(self.BuildItem)
@@ -835,11 +830,20 @@ class Build():
self.AutoGenMgr = None
EdkLogger.info("")
os.chdir(self.WorkspaceDir)
- GlobalData.gCacheIR = Manager().dict()
self.log_q = log_q
GlobalData.file_lock = mp.Lock()
- GlobalData.cache_lock = mp.Lock()
- def StartAutoGen(self,mqueue, DataPipe,SkipAutoGen,PcdMaList,share_data):
+ # Init cache data for local only
+ GlobalData.gPackageHashFile = dict()
+ GlobalData.gModulePreMakeCacheStatus = dict()
+ GlobalData.gModuleMakeCacheStatus = dict()
+ GlobalData.gHashChainStatus = dict()
+ GlobalData.gCMakeHashFile = dict()
+ GlobalData.gModuleHashFile = dict()
+ GlobalData.gFileHashDict = dict()
+ GlobalData.gModuleAllCacheStatus = set()
+ GlobalData.gModuleCacheHit = set()
+
+ def StartAutoGen(self,mqueue, DataPipe,SkipAutoGen,PcdMaList,cqueue):
try:
if SkipAutoGen:
return True,0
@@ -849,29 +853,27 @@ class Build():
if FfsCmd is None:
FfsCmd = {}
GlobalData.FfsCmd = FfsCmd
- GlobalData.libConstPcd = DataPipe.Get("LibConstPcd")
- GlobalData.Refes = DataPipe.Get("REFS")
- auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,GlobalData.file_lock,GlobalData.cache_lock,share_data,self.log_q,error_event) for _ in range(self.ThreadNumber)]
+ auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,GlobalData.file_lock,cqueue,self.log_q,error_event) for _ in range(self.ThreadNumber)]
self.AutoGenMgr = AutoGenManager(auto_workers,feedback_q,error_event)
self.AutoGenMgr.start()
for w in auto_workers:
w.start()
if PcdMaList is not None:
for PcdMa in PcdMaList:
- if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:
- PcdMa.GenModuleFilesHash(share_data)
- PcdMa.GenPreMakefileHash(share_data)
- if PcdMa.CanSkipbyPreMakefileCache(share_data):
- continue
+ # The SourceFileList calling sequence impacts the makefile string sequence.
+ # Create cached SourceFileList here to unify its calling sequence for both
+ # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
+ RetVal = PcdMa.SourceFileList
+ # Force cache miss for PCD driver
+ if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and self.Target in [None, "", "all"]:
+ cqueue.put((PcdMa.MetaFile.Path, PcdMa.Arch, "PreMakeCache", False))
PcdMa.CreateCodeFile(False)
PcdMa.CreateMakeFile(False,GenFfsList = DataPipe.Get("FfsCommand").get((PcdMa.MetaFile.Path, PcdMa.Arch),[]))
+ # Force cache miss for PCD driver
if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:
- PcdMa.GenMakeHeaderFilesHash(share_data)
- PcdMa.GenMakeHash(share_data)
- if PcdMa.CanSkipbyMakeCache(share_data):
- continue
+ cqueue.put((PcdMa.MetaFile.Path, PcdMa.Arch, "MakeCache", False))
self.AutoGenMgr.join()
rt = self.AutoGenMgr.Status
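With the shared gCacheIR dictionary gone, cache results travel through cqueue as flat tuples of (MetaFile path, Arch, cache type, hit flag); the forced-miss entries for PCD drivers above use exactly that shape, and the reported tuples end up in GlobalData.gModuleAllCacheStatus. A minimal sketch of the message shape, with a hypothetical drain helper standing in for the real consumer (which lives in the AutoGen worker code, outside this diff):

    def force_cache_miss(cqueue, metafile_path, arch, cache_type="PreMakeCache"):
        # Same tuple shape the PCD-driver path puts on the queue:
        # (metafile path, arch, "PreMakeCache"/"MakeCache", hit flag)
        cqueue.put((metafile_path, arch, cache_type, False))

    def drain_cache_status(cqueue, all_cache_status):
        # Hypothetical helper: move every reported tuple into the local
        # bookkeeping set (gModuleAllCacheStatus in this patch).
        while not cqueue.empty():
            all_cache_status.add(cqueue.get())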
@@ -1182,38 +1184,6 @@ class Build():
EdkLogger.error("Postbuild", POSTBUILD_ERROR, 'Postbuild process is not success!')
EdkLogger.info("\n- Postbuild Done -\n")
- ## Error handling for hash feature
- #
- # On BuildTask error, iterate through the Module Build tracking
- # dictionary to determine wheather a module failed to build. Invalidate
- # the hash associated with that module by removing it from storage.
- #
- #
- def invalidateHash(self):
- # Only for hashing feature
- if not GlobalData.gUseHashCache:
- return
-
- # GlobalData.gModuleBuildTracking contains only modules or libs that cannot be skipped by hash
- for Ma in GlobalData.gModuleBuildTracking:
- # Skip invalidating for Successful Module/Lib builds
- if GlobalData.gModuleBuildTracking[Ma] == 'SUCCESS':
- continue
-
- # The module failed to build, failed to start building, or failed the header check test from this point on
-
- # Remove .hash from build
- ModuleHashFile = os.path.join(Ma.BuildDir, Ma.Name + ".hash")
- if os.path.exists(ModuleHashFile):
- os.remove(ModuleHashFile)
-
- # Remove .hash file from cache
- if GlobalData.gBinCacheDest:
- FileDir = os.path.join(GlobalData.gBinCacheDest, Ma.PlatformInfo.OutputDir, Ma.BuildTarget + "_" + Ma.ToolChain, Ma.Arch, Ma.SourceDir, Ma.MetaFile.BaseName)
- HashFile = os.path.join(FileDir, Ma.Name + '.hash')
- if os.path.exists(HashFile):
- os.remove(HashFile)
-
## Build a module or platform
#
# Create autogen code and makefile for a module or platform, and the launch
@@ -1251,7 +1221,8 @@ class Build():
self.Progress.Start("Generating makefile and code")
data_pipe_file = os.path.join(AutoGenObject.BuildDir, "GlobalVar_%s_%s.bin" % (str(AutoGenObject.Guid),AutoGenObject.Arch))
AutoGenObject.DataPipe.dump(data_pipe_file)
- autogen_rt,errorcode = self.StartAutoGen(mqueue, AutoGenObject.DataPipe, self.SkipAutoGen, PcdMaList, GlobalData.gCacheIR)
+ cqueue = mp.Queue()
+ autogen_rt,errorcode = self.StartAutoGen(mqueue, AutoGenObject.DataPipe, self.SkipAutoGen, PcdMaList, cqueue)
AutoGenIdFile = os.path.join(GlobalData.gConfDirectory,".AutoGenIdFile.txt")
with open(AutoGenIdFile,"w") as fw:
fw.write("Arch=%s\n" % "|".join((AutoGenObject.Workspace.ArchList)))
@@ -1292,7 +1263,11 @@ class Build():
LaunchCommand(BuildCommand, AutoGenObject.MakeFileDir)
self.CreateAsBuiltInf()
if GlobalData.gBinCacheDest:
- self.UpdateBuildCache()
+ self.GenDestCache()
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
+ # Only for --hash
+ # Update PreMakeCacheChain files
+ self.GenLocalPreMakeCache()
self.BuildModules = []
return True
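The same post-make conditional is added at every call site in this patch: --binary-destination regenerates and copies the full destination cache, while a bare --hash only refreshes the local PreMake hash chain files. A condensed sketch of that decision, using hypothetical parameters in place of the Build instance and the GlobalData module:

    def update_cache_after_make(build, global_data):
        # Mirrors the conditional inserted after each make step.
        if global_data.gBinCacheDest:
            # --binary-destination: write the shared destination cache
            build.GenDestCache()
        elif global_data.gUseHashCache and not global_data.gBinCacheSource:
            # --hash only: update the local PreMakeCacheChain files
            build.GenLocalPreMakeCache()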
@@ -1326,7 +1301,11 @@ class Build():
LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir,ModAutoGen)
self.CreateAsBuiltInf()
if GlobalData.gBinCacheDest:
- self.UpdateBuildCache()
+ self.GenDestCache()
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
+ # Only for --hash
+ # Update PreMakeCacheChain files
+ self.GenLocalPreMakeCache()
self.BuildModules = []
return True
@@ -1422,7 +1401,11 @@ class Build():
AutoGenObject.BuildTime = LaunchCommand(BuildCommand, AutoGenObject.MakeFileDir)
self.CreateAsBuiltInf()
if GlobalData.gBinCacheDest:
- self.UpdateBuildCache()
+ self.GenDestCache()
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
+ # Only for --hash
+ # Update PreMakeCacheChain files
+ self.GenLocalPreMakeCache()
self.BuildModules = []
return True
@@ -1870,13 +1853,7 @@ class Build():
if GlobalData.gEnableGenfdsMultiThread and self.Fdf:
CmdListDict = self._GenFfsCmd(Wa.ArchList)
- # Add Platform and Package level hash in share_data for module hash calculation later
- if GlobalData.gBinCacheSource or GlobalData.gBinCacheDest:
- GlobalData.gCacheIR[('PlatformHash')] = GlobalData.gPlatformHash
- for PkgName in GlobalData.gPackageHash.keys():
- GlobalData.gCacheIR[(PkgName, 'PackageHash')] = GlobalData.gPackageHash[PkgName]
GlobalData.file_lock = mp.Lock()
- GlobalData.cache_lock = mp.Lock()
GlobalData.FfsCmd = CmdListDict
self.Progress.Stop("done!")
@@ -1888,8 +1865,6 @@ class Build():
AutoGenStart = time.time()
GlobalData.gGlobalDefines['ARCH'] = Arch
Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)
- GlobalData.libConstPcd = Pa.DataPipe.Get("LibConstPcd")
- GlobalData.Refes = Pa.DataPipe.Get("REFS")
for Module in Pa.Platform.Modules:
if self.ModuleFile.Dir == Module.Dir and self.ModuleFile.Name == Module.Name:
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
@@ -1900,13 +1875,11 @@ class Build():
Ma.Workspace = Wa
MaList.append(Ma)
- if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:
- Ma.GenModuleFilesHash(GlobalData.gCacheIR)
- Ma.GenPreMakefileHash(GlobalData.gCacheIR)
- if Ma.CanSkipbyPreMakefileCache(GlobalData.gCacheIR):
- self.HashSkipModules.append(Ma)
- EdkLogger.quiet("cache hit: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))
+ if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and self.Target in [None, "", "all"]:
+ if Ma.CanSkipbyPreMakeCache():
continue
+ else:
+ self.PreMakeCacheMiss.add(Ma)
# Not to auto-gen for targets 'clean', 'cleanlib', 'cleanall', 'run', 'fds'
if self.Target not in ['clean', 'cleanlib', 'cleanall', 'run', 'fds']:
@@ -1929,19 +1902,12 @@ class Build():
return True
if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:
- Ma.GenMakeHeaderFilesHash(GlobalData.gCacheIR)
- Ma.GenMakeHash(GlobalData.gCacheIR)
- if Ma.CanSkipbyMakeCache(GlobalData.gCacheIR):
- self.HashSkipModules.append(Ma)
- EdkLogger.quiet("cache hit: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))
+ if Ma.CanSkipbyMakeCache():
continue
else:
- EdkLogger.quiet("cache miss: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))
- Ma.PrintFirstMakeCacheMissFile(GlobalData.gCacheIR)
+ self.MakeCacheMiss.add(Ma)
self.BuildModules.append(Ma)
- # Initialize all modules in tracking to 'FAIL'
- GlobalData.gModuleBuildTracking[Ma] = 'FAIL'
self.AutoGenTime += int(round((time.time() - AutoGenStart)))
MakeStart = time.time()
for Ma in self.BuildModules:
@@ -1952,7 +1918,6 @@ class Build():
# we need a full version of makefile for platform
ExitFlag.set()
BuildTask.WaitForComplete()
- self.invalidateHash()
Pa.CreateMakeFile(False)
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
# Start task scheduler
@@ -1962,7 +1927,6 @@ class Build():
# in case there's an interruption. we need a full version of makefile for platform
Pa.CreateMakeFile(False)
if BuildTask.HasError():
- self.invalidateHash()
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
self.MakeTime += int(round((time.time() - MakeStart)))
@@ -1971,11 +1935,14 @@ class Build():
BuildTask.WaitForComplete()
self.CreateAsBuiltInf()
if GlobalData.gBinCacheDest:
- self.UpdateBuildCache()
+ self.GenDestCache()
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
+ # Only for --hash
+ # Update PreMakeCacheChain files
+ self.GenLocalPreMakeCache()
self.BuildModules = []
self.MakeTime += int(round((time.time() - MakeContiue)))
if BuildTask.HasError():
- self.invalidateHash()
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
self.BuildReport.AddPlatformReport(Wa, MaList)
@@ -2028,7 +1995,6 @@ class Build():
# Save MAP buffer into MAP file.
#
self._SaveMapFile (MapBuffer, Wa)
- self.invalidateHash()
def _GenFfsCmd(self,ArchList):
# convert dictionary of Cmd:(Inf,Arch)
@@ -2134,20 +2100,13 @@ class Build():
self.BuildReport.AddPlatformReport(Wa)
Wa.CreateMakeFile(False)
- # Add ffs build to makefile
+ # Add ffs build to makefile
CmdListDict = {}
if GlobalData.gEnableGenfdsMultiThread and self.Fdf:
CmdListDict = self._GenFfsCmd(Wa.ArchList)
- # Add Platform and Package level hash in share_data for module hash calculation later
- if GlobalData.gBinCacheSource or GlobalData.gBinCacheDest:
- GlobalData.gCacheIR[('PlatformHash')] = GlobalData.gPlatformHash
- for PkgName in GlobalData.gPackageHash.keys():
- GlobalData.gCacheIR[(PkgName, 'PackageHash')] = GlobalData.gPackageHash[PkgName]
-
self.AutoGenTime += int(round((time.time() - WorkspaceAutoGenTime)))
BuildModules = []
- TotalModules = []
for Arch in Wa.ArchList:
PcdMaList = []
AutoGenStart = time.time()
@@ -2158,7 +2117,7 @@ class Build():
ModuleList = []
for Inf in Pa.Platform.Modules:
ModuleList.append(Inf)
- # Add the INF only list in FDF
+ # Add the INF only list in FDF
if GlobalData.gFdfParser is not None:
for InfName in GlobalData.gFdfParser.Profile.InfList:
Inf = PathClass(NormPath(InfName), self.WorkspaceDir, Arch)
@@ -2172,58 +2131,73 @@ class Build():
Pa.DataPipe.DataContainer = {"LibraryBuildDirectoryList":Pa.LibraryBuildDirectoryList}
Pa.DataPipe.DataContainer = {"ModuleBuildDirectoryList":Pa.ModuleBuildDirectoryList}
Pa.DataPipe.DataContainer = {"FdsCommandDict": Wa.GenFdsCommandDict}
+ # Prepare the cache share data for multiprocessing
+ Pa.DataPipe.DataContainer = {"gPlatformHashFile":GlobalData.gPlatformHashFile}
ModuleCodaFile = {}
for ma in Pa.ModuleAutoGenList:
ModuleCodaFile[(ma.MetaFile.File,ma.MetaFile.Root,ma.Arch,ma.MetaFile.Path)] = [item.Target for item in ma.CodaTargetList]
Pa.DataPipe.DataContainer = {"ModuleCodaFile":ModuleCodaFile}
+ # ModuleList contains all driver modules only
for Module in ModuleList:
- # Get ModuleAutoGen object to generate C code file and makefile
+ # Get ModuleAutoGen object to generate C code file and makefile
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
-
if Ma is None:
continue
if Ma.PcdIsDriver:
Ma.PlatformInfo = Pa
Ma.Workspace = Wa
PcdMaList.append(Ma)
- TotalModules.append(Ma)
- # Initialize all modules in tracking to 'FAIL'
- GlobalData.gModuleBuildTracking[Ma] = 'FAIL'
-
+ self.AllDrivers.add(Ma)
+ self.AllModules.add(Ma)
mqueue = mp.Queue()
+ cqueue = mp.Queue()
for m in Pa.GetAllModuleInfo:
mqueue.put(m)
+ module_file,module_root,module_path,module_basename,\
+ module_originalpath,module_arch,IsLib = m
+ Ma = ModuleAutoGen(Wa, PathClass(module_path, Wa), BuildTarget,\
+ ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
+ self.AllModules.add(Ma)
data_pipe_file = os.path.join(Pa.BuildDir, "GlobalVar_%s_%s.bin" % (str(Pa.Guid),Pa.Arch))
Pa.DataPipe.dump(data_pipe_file)
- autogen_rt, errorcode = self.StartAutoGen(mqueue, Pa.DataPipe, self.SkipAutoGen, PcdMaList,GlobalData.gCacheIR)
-
- # Skip cache hit modules
- if GlobalData.gBinCacheSource:
- for Ma in TotalModules:
- if (Ma.MetaFile.Path, Ma.Arch) in GlobalData.gCacheIR and \
- GlobalData.gCacheIR[(Ma.MetaFile.Path, Ma.Arch)].PreMakeCacheHit:
- self.HashSkipModules.append(Ma)
- continue
- if (Ma.MetaFile.Path, Ma.Arch) in GlobalData.gCacheIR and \
- GlobalData.gCacheIR[(Ma.MetaFile.Path, Ma.Arch)].MakeCacheHit:
- self.HashSkipModules.append(Ma)
- continue
- BuildModules.append(Ma)
- else:
- BuildModules.extend(TotalModules)
+ autogen_rt, errorcode = self.StartAutoGen(mqueue, Pa.DataPipe, self.SkipAutoGen, PcdMaList, cqueue)
if not autogen_rt:
self.AutoGenMgr.TerminateWorkers()
self.AutoGenMgr.join(1)
raise FatalError(errorcode)
+
+ if GlobalData.gUseHashCache:
+ for item in GlobalData.gModuleAllCacheStatus:
+ (MetaFilePath, Arch, CacheStr, Status) = item
+ Ma = ModuleAutoGen(Wa, PathClass(MetaFilePath, Wa), BuildTarget,\
+ ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
+ if CacheStr == "PreMakeCache" and Status == False:
+ self.PreMakeCacheMiss.add(Ma)
+ if CacheStr == "PreMakeCache" and Status == True:
+ self.PreMakeCacheHit.add(Ma)
+ GlobalData.gModuleCacheHit.add(Ma)
+ if CacheStr == "MakeCache" and Status == False:
+ self.MakeCacheMiss.add(Ma)
+ if CacheStr == "MakeCache" and Status == True:
+ self.MakeCacheHit.add(Ma)
+ GlobalData.gModuleCacheHit.add(Ma)
self.AutoGenTime += int(round((time.time() - AutoGenStart)))
AutoGenIdFile = os.path.join(GlobalData.gConfDirectory,".AutoGenIdFile.txt")
with open(AutoGenIdFile,"w") as fw:
fw.write("Arch=%s\n" % "|".join((Wa.ArchList)))
fw.write("BuildDir=%s\n" % Wa.BuildDir)
fw.write("PlatformGuid=%s\n" % str(Wa.AutoGenObjectList[0].Guid))
+
+ if GlobalData.gBinCacheSource:
+ BuildModules.extend(self.MakeCacheMiss)
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheDest:
+ BuildModules.extend(self.PreMakeCacheMiss)
+ else:
+ BuildModules.extend(self.AllDrivers)
+
self.Progress.Stop("done!")
return Wa, BuildModules
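The loop above turns the flat (path, arch, type, status) records from gModuleAllCacheStatus into four hit/miss sets, and the flag check that follows decides which of them feeds BuildModules: --binary-source rebuilds only the MakeCache misses, a bare --hash rebuilds the PreMakeCache misses, and otherwise every driver is built. A condensed sketch of that classification, keyed on (path, arch) stand-ins rather than ModuleAutoGen objects:

    def classify_cache_status(all_cache_status):
        # all_cache_status: iterable of (metafile_path, arch, cache_type, hit)
        premake_hit, premake_miss = set(), set()
        make_hit, make_miss = set(), set()
        for path, arch, cache_type, hit in all_cache_status:
            if cache_type == "PreMakeCache":
                (premake_hit if hit else premake_miss).add((path, arch))
            elif cache_type == "MakeCache":
                (make_hit if hit else make_miss).add((path, arch))
        return premake_hit, premake_miss, make_hit, make_miss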
@@ -2253,18 +2227,9 @@ class Build():
GlobalData.gAutoGenPhase = False
if GlobalData.gBinCacheSource:
- EdkLogger.quiet("Total cache hit driver num: %s, cache miss driver num: %s" % (len(set(self.HashSkipModules)), len(set(self.BuildModules))))
- CacheHitMa = set()
- CacheNotHitMa = set()
- for IR in GlobalData.gCacheIR.keys():
- if 'PlatformHash' in IR or 'PackageHash' in IR:
- continue
- if GlobalData.gCacheIR[IR].PreMakeCacheHit or GlobalData.gCacheIR[IR].MakeCacheHit:
- CacheHitMa.add(IR)
- else:
- # There might be binary module or module which has .inc files, not count for cache miss
- CacheNotHitMa.add(IR)
- EdkLogger.quiet("Total module num: %s, cache hit module num: %s" % (len(CacheHitMa)+len(CacheNotHitMa), len(CacheHitMa)))
+ EdkLogger.quiet("[cache Summary]: Total module num: %s" % len(self.AllModules))
+ EdkLogger.quiet("[cache Summary]: PreMakecache miss num: %s " % len(self.PreMakeCacheMiss))
+ EdkLogger.quiet("[cache Summary]: Makecache miss num: %s " % len(self.MakeCacheMiss))
for Arch in Wa.ArchList:
MakeStart = time.time()
@@ -2277,7 +2242,6 @@ class Build():
# we need a full version of makefile for platform
ExitFlag.set()
BuildTask.WaitForComplete()
- self.invalidateHash()
Pa.CreateMakeFile(False)
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
# Start task scheduler
@@ -2287,7 +2251,6 @@ class Build():
# in case there's an interruption. we need a full version of makefile for platform
if BuildTask.HasError():
- self.invalidateHash()
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
self.MakeTime += int(round((time.time() - MakeStart)))
@@ -2301,7 +2264,11 @@ class Build():
BuildTask.WaitForComplete()
self.CreateAsBuiltInf()
if GlobalData.gBinCacheDest:
- self.UpdateBuildCache()
+ self.GenDestCache()
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
+ # Only for --hash
+ # Update PreMakeCacheChain files
+ self.GenLocalPreMakeCache()
#
# Get Module List
#
@@ -2313,7 +2280,6 @@ class Build():
# has been signaled.
#
if BuildTask.HasError():
- self.invalidateHash()
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
# Create MAP file when Load Fix Address is enabled.
@@ -2350,7 +2316,6 @@ class Build():
#
self._SaveMapFile(MapBuffer, Wa)
self.CreateGuidedSectionToolsFile(Wa)
- self.invalidateHash()
## Generate GuidedSectionTools.txt in the FV directories.
#
def CreateGuidedSectionToolsFile(self,Wa):
@@ -2406,6 +2371,12 @@ class Build():
## Launch the module or platform build
#
def Launch(self):
+ self.AllDrivers = set()
+ self.AllModules = set()
+ self.PreMakeCacheMiss = set()
+ self.PreMakeCacheHit = set()
+ self.MakeCacheMiss = set()
+ self.MakeCacheHit = set()
if not self.ModuleFile:
if not self.SpawnMode or self.Target not in ["", "all"]:
self.SpawnMode = False
@@ -2423,23 +2394,16 @@ class Build():
for Module in self.BuildModules:
Module.CreateAsBuiltInf()
- def UpdateBuildCache(self):
- all_lib_set = set()
- all_mod_set = set()
- for Module in self.BuildModules:
+ def GenDestCache(self):
+ for Module in self.AllModules:
+ Module.GenPreMakefileHashList()
+ Module.GenMakefileHashList()
Module.CopyModuleToCache()
- all_mod_set.add(Module)
- for Module in self.HashSkipModules:
- Module.CopyModuleToCache()
- all_mod_set.add(Module)
- for Module in all_mod_set:
- for lib in Module.LibraryAutoGenList:
- all_lib_set.add(lib)
- for lib in all_lib_set:
- lib.CopyModuleToCache()
- all_lib_set.clear()
- all_mod_set.clear()
- self.HashSkipModules = []
+
+ def GenLocalPreMakeCache(self):
+ for Module in self.PreMakeCacheMiss:
+ Module.GenPreMakefileHashList()
+
## Do some clean-up works when error occurred
def Relinquish(self):
OldLogLevel = EdkLogger.GetLevel()