#\r
from __future__ import absolute_import\r
from AutoGen.AutoGen import AutoGen\r
-from Common.LongFilePathSupport import CopyLongFilePath\r
+from Common.LongFilePathSupport import LongFilePath, CopyLongFilePath\r
from Common.BuildToolError import *\r
from Common.DataType import *\r
from Common.Misc import *\r
from .GenPcdDb import CreatePcdDatabaseCode\r
from Common.caching import cached_class_function\r
from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r
-from AutoGen.CacheIR import ModuleBuildCacheIR\r
import json\r
import tempfile\r
\r
self.IsAsBuiltInfCreated = True\r
\r
def CacheCopyFile(self, DestDir, SourceDir, File):\r
+ if os.path.isdir(File):\r
+ return\r
+\r
sub_dir = os.path.relpath(File, SourceDir)\r
destination_file = os.path.join(DestDir, sub_dir)\r
destination_dir = os.path.dirname(destination_file)\r
return\r
\r
def CopyModuleToCache(self):\r
- self.GenPreMakefileHash(GlobalData.gCacheIR)\r
- if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \\r
- not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
- EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return False\r
+ # Find the MakeHashStr and PreMakeHashStr from latest MakeHashFileList\r
+ # and PreMakeHashFileList files\r
+ MakeHashStr = None\r
+ PreMakeHashStr = None\r
+ MakeTimeStamp = 0\r
+ PreMakeTimeStamp = 0\r
+ Files = [f for f in os.listdir(LongFilePath(self.BuildDir)) if path.isfile(LongFilePath(path.join(self.BuildDir, f)))]\r
+ for File in Files:\r
+ if ".MakeHashFileList." in File:\r
+                # find latest file through time stamp\r
+ FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]\r
+ if FileTimeStamp > MakeTimeStamp:\r
+ MakeTimeStamp = FileTimeStamp\r
+ MakeHashStr = File.split('.')[-1]\r
+ if len(MakeHashStr) != 32:\r
+ EdkLogger.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File))\r
+ if ".PreMakeHashFileList." in File:\r
+ FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]\r
+ if FileTimeStamp > PreMakeTimeStamp:\r
+ PreMakeTimeStamp = FileTimeStamp\r
+ PreMakeHashStr = File.split('.')[-1]\r
+ if len(PreMakeHashStr) != 32:\r
+ EdkLogger.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File))\r
\r
- self.GenMakeHash(GlobalData.gCacheIR)\r
- if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \\r
- not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashChain or \\r
- not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
- EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return False\r
+ if not MakeHashStr:\r
+ EdkLogger.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return\r
+ if not PreMakeHashStr:\r
+ EdkLogger.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return\r
\r
- MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)\r
- FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName, MakeHashStr)\r
- FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name, MakeHashStr)\r
+ # Create Cache destination dirs\r
+ FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
+ CacheFileDir = path.join(FileDir, MakeHashStr)\r
+ CacheFfsDir = path.join(FfsDir, MakeHashStr)\r
+ CreateDirectory (CacheFileDir)\r
+ CreateDirectory (CacheFfsDir)\r
\r
- CreateDirectory (FileDir)\r
- self.SaveHashChainFileToCache(GlobalData.gCacheIR)\r
- ModuleFile = path.join(self.OutputDir, self.Name + '.inf')\r
- if os.path.exists(ModuleFile):\r
- CopyFileOnChange(ModuleFile, FileDir)\r
+ # Create ModuleHashPair file to support multiple version cache together\r
+ ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
+ if os.path.exists(ModuleHashPair):\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
+ if not (PreMakeHashStr, MakeHashStr) in set(map(tuple, ModuleHashPairList)):\r
+ ModuleHashPairList.insert(0, (PreMakeHashStr, MakeHashStr))\r
+ with open(ModuleHashPair, 'w') as f:\r
+ json.dump(ModuleHashPairList, f, indent=2)\r
+\r
+ # Copy files to Cache destination dirs\r
if not self.OutputFile:\r
Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
self.OutputFile = Ma.Binaries\r
for File in self.OutputFile:\r
- if os.path.exists(File):\r
- if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):\r
- self.CacheCopyFile(FfsDir, self.FfsOutputDir, File)\r
+ if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):\r
+ self.CacheCopyFile(CacheFfsDir, self.FfsOutputDir, File)\r
+ else:\r
+ if self.Name + ".autogen.hash." in File or \\r
+ self.Name + ".autogen.hashchain." in File or \\r
+ self.Name + ".hash." in File or \\r
+ self.Name + ".hashchain." in File or \\r
+ self.Name + ".PreMakeHashFileList." in File or \\r
+ self.Name + ".MakeHashFileList." in File:\r
+ self.CacheCopyFile(FileDir, self.BuildDir, File)\r
else:\r
- self.CacheCopyFile(FileDir, self.OutputDir, File)\r
-\r
- def SaveHashChainFileToCache(self, gDict):\r
- if not GlobalData.gBinCacheDest:\r
- return False\r
-\r
- self.GenPreMakefileHash(gDict)\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
- EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return False\r
-\r
- self.GenMakeHash(gDict)\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
- EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return False\r
-\r
- # save the hash chain list as cache file\r
- MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)\r
- CacheDestDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
- CacheHashDestDir = path.join(CacheDestDir, MakeHashStr)\r
- ModuleHashPair = path.join(CacheDestDir, self.Name + ".ModuleHashPair")\r
- MakeHashChain = path.join(CacheHashDestDir, self.Name + ".MakeHashChain")\r
- ModuleFilesChain = path.join(CacheHashDestDir, self.Name + ".ModuleFilesChain")\r
-\r
- # save the HashChainDict as json file\r
- CreateDirectory (CacheDestDir)\r
- CreateDirectory (CacheHashDestDir)\r
- try:\r
- ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
- if os.path.exists(ModuleHashPair):\r
- with open(ModuleHashPair, 'r') as f:\r
- ModuleHashPairList = json.load(f)\r
- PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
- MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
- ModuleHashPairList.append((PreMakeHash, MakeHash))\r
- ModuleHashPairList = list(set(map(tuple, ModuleHashPairList)))\r
- with open(ModuleHashPair, 'w') as f:\r
- json.dump(ModuleHashPairList, f, indent=2)\r
- except:\r
- EdkLogger.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair)\r
- return False\r
-\r
- try:\r
- with open(MakeHashChain, 'w') as f:\r
- json.dump(gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain, f, indent=2)\r
- except:\r
- EdkLogger.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain)\r
- return False\r
-\r
- try:\r
- with open(ModuleFilesChain, 'w') as f:\r
- json.dump(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain, f, indent=2)\r
- except:\r
- EdkLogger.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain)\r
- return False\r
-\r
- # save the autogenfile and makefile for debug usage\r
- CacheDebugDir = path.join(CacheHashDestDir, "CacheDebug")\r
- CreateDirectory (CacheDebugDir)\r
- CopyFileOnChange(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath, CacheDebugDir)\r
- if gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
- for File in gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
- CopyFileOnChange(str(File), CacheDebugDir)\r
-\r
- return True\r
-\r
+ self.CacheCopyFile(CacheFileDir, self.BuildDir, File)\r
## Create makefile for the module and its dependent libraries\r
#\r
# @param CreateLibraryMakeFile Flag indicating if or not the makefiles of\r
#\r
@cached_class_function\r
def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r
- gDict = GlobalData.gCacheIR\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
- return\r
\r
# nest this function inside it's only caller.\r
def CreateTimeStamp():\r
MakefileType = Makefile._FileType\r
MakefileName = Makefile._FILE_NAME_[MakefileType]\r
MakefilePath = os.path.join(self.MakeFileDir, MakefileName)\r
-\r
- MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
- MewIR.MakefilePath = MakefilePath\r
- MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
- MewIR.CreateMakeFileDone = True\r
- with GlobalData.cache_lock:\r
- try:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.MakefilePath = MakefilePath\r
- IR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
- IR.CreateMakeFileDone = True\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- except:\r
- gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
+ FilePath = path.join(self.BuildDir, self.Name + ".makefile")\r
+ SaveFileOnChange(FilePath, MakefilePath, False)\r
\r
def CopyBinaryFiles(self):\r
for File in self.Module.Binaries:\r
# dependent libraries will be created\r
#\r
def CreateCodeFile(self, CreateLibraryCodeFile=True):\r
- gDict = GlobalData.gCacheIR\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:\r
- return\r
\r
if self.IsCodeFileCreated:\r
return\r
(" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))\r
\r
self.IsCodeFileCreated = True\r
- MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
- MewIR.CreateCodeFileDone = True\r
- with GlobalData.cache_lock:\r
- try:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.CreateCodeFileDone = True\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- except:\r
- gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
\r
return AutoGenList\r
\r
self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)\r
return RetVal\r
\r
- def GenModuleHash(self):\r
- # Initialize a dictionary for each arch type\r
- if self.Arch not in GlobalData.gModuleHash:\r
- GlobalData.gModuleHash[self.Arch] = {}\r
+ def GenCMakeHash(self):\r
+ # GenCMakeHash can only be called in --binary-destination\r
+ # Never called in multiprocessing and always directly save result in main process,\r
+ # so no need remote dict to share the gCMakeHashFile result with main process\r
\r
- # Early exit if module or library has been hashed and is in memory\r
- if self.Name in GlobalData.gModuleHash[self.Arch]:\r
- return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
+ DependencyFileSet = set()\r
+ # Add AutoGen files\r
+ if self.AutoGenFileList:\r
+ for File in set(self.AutoGenFileList):\r
+ DependencyFileSet.add(File)\r
+\r
+ # Add Makefile\r
+ abspath = path.join(self.BuildDir, self.Name + ".makefile")\r
+        lines = []\r
+        try:\r
+            with open(LongFilePath(abspath),"r") as fd:\r
+ lines = fd.readlines()\r
+ except Exception as e:\r
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)\r
+ if lines:\r
+ DependencyFileSet.update(lines)\r
\r
+        # Calculate all above dependency files hash\r
# Initialze hash object\r
+ FileList = []\r
m = hashlib.md5()\r
-\r
- # Add Platform level hash\r
- m.update(GlobalData.gPlatformHash.encode('utf-8'))\r
-\r
- # Add Package level hash\r
- if self.DependentPackageList:\r
- for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
- if Pkg.PackageName in GlobalData.gPackageHash:\r
- m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))\r
-\r
- # Add Library hash\r
- if self.LibraryAutoGenList:\r
- for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
- if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r
- Lib.GenModuleHash()\r
- m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
-\r
- # Add Module self\r
- with open(str(self.MetaFile), 'rb') as f:\r
- Content = f.read()\r
- m.update(Content)\r
-\r
- # Add Module's source files\r
- if self.SourceFileList:\r
- for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r
- f = open(str(File), 'rb')\r
+ for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
+ if not path.exists(LongFilePath(str(File))):\r
+ EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
+ continue\r
+ with open(LongFilePath(str(File)), 'rb') as f:\r
Content = f.read()\r
- f.close()\r
- m.update(Content)\r
-\r
- GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()\r
-\r
- return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
+ m.update(Content)\r
+ FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
\r
- def GenModuleFilesHash(self, gDict):\r
- # Early exit if module or library has been hashed and is in memory\r
- if (self.MetaFile.Path, self.Arch) in gDict:\r
- if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
+ HashChainFile = path.join(self.BuildDir, self.Name + ".autogen.hashchain." + m.hexdigest())\r
+ GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+ try:\r
+ with open(LongFilePath(HashChainFile), 'w') as f:\r
+ json.dump(FileList, f, indent=2)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)\r
+ return False\r
\r
- # skip if the module cache already crashed\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return\r
+ def GenModuleHash(self):\r
+ # GenModuleHash only called after autogen phase\r
+ # Never called in multiprocessing and always directly save result in main process,\r
+ # so no need remote dict to share the gModuleHashFile result with main process\r
+ #\r
+        # GenModuleHash consumes no dict.\r
+        # GenModuleHash produces the local gModuleHashFile dict.\r
\r
DependencyFileSet = set()\r
# Add Module Meta file\r
- DependencyFileSet.add(self.MetaFile)\r
+ DependencyFileSet.add(self.MetaFile.Path)\r
\r
# Add Module's source files\r
if self.SourceFileList:\r
for File in set(self.SourceFileList):\r
- DependencyFileSet.add(File)\r
+ DependencyFileSet.add(File.Path)\r
\r
# Add modules's include header files\r
- # Search dependency file list for each source file\r
- SourceFileList = []\r
- OutPutFileList = []\r
- for Target in self.IntroTargetList:\r
- SourceFileList.extend(Target.Inputs)\r
- OutPutFileList.extend(Target.Outputs)\r
- if OutPutFileList:\r
- for Item in OutPutFileList:\r
- if Item in SourceFileList:\r
- SourceFileList.remove(Item)\r
- SearchList = []\r
- for file_path in self.IncludePathList + self.BuildOptionIncPathList:\r
- # skip the folders in platform BuildDir which are not been generated yet\r
- if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):\r
- continue\r
- SearchList.append(file_path)\r
- FileDependencyDict = {}\r
- ForceIncludedFile = []\r
- for F in SourceFileList:\r
- # skip the files which are not been generated yet, because\r
- # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c\r
- if not os.path.exists(F.Path):\r
- continue\r
- FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)\r
+ # Directly use the deps.txt file in the module BuildDir\r
+ abspath = path.join(self.BuildDir, "deps.txt")\r
+ rt = None\r
+ try:\r
+ with open(LongFilePath(abspath),"r") as fd:\r
+ lines = fd.readlines()\r
+ if lines:\r
+ rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])\r
+ except Exception as e:\r
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)\r
+\r
+ if rt:\r
+ DependencyFileSet.update(rt)\r
\r
- if FileDependencyDict:\r
- for Dependency in FileDependencyDict.values():\r
- DependencyFileSet.update(set(Dependency))\r
\r
# Caculate all above dependency files hash\r
# Initialze hash object\r
FileList = []\r
m = hashlib.md5()\r
+ BuildDirStr = path.abspath(self.BuildDir).lower()\r
for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
- if not os.path.exists(str(File)):\r
+ # Skip the AutoGen files in BuildDir which already been\r
+ # included in .autogen.hash. file\r
+ if BuildDirStr in path.abspath(File).lower():\r
+ continue\r
+ if not path.exists(LongFilePath(File)):\r
EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
continue\r
- with open(str(File), 'rb') as f:\r
+ with open(LongFilePath(File), 'rb') as f:\r
Content = f.read()\r
m.update(Content)\r
- FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
-\r
+ FileList.append((File, hashlib.md5(Content).hexdigest()))\r
\r
- MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
- MewIR.ModuleFilesHashDigest = m.digest()\r
- MewIR.ModuleFilesHashHexDigest = m.hexdigest()\r
- MewIR.ModuleFilesChain = FileList\r
- with GlobalData.cache_lock:\r
- try:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.ModuleFilesHashDigest = m.digest()\r
- IR.ModuleFilesHashHexDigest = m.hexdigest()\r
- IR.ModuleFilesChain = FileList\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- except:\r
- gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
-\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
-\r
- def GenPreMakefileHash(self, gDict):\r
- # Early exit if module or library has been hashed and is in memory\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
+ HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())\r
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+ try:\r
+ with open(LongFilePath(HashChainFile), 'w') as f:\r
+ json.dump(FileList, f, indent=2)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)\r
+ return False\r
\r
- # skip if the module cache already crashed\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return\r
+ def GenPreMakefileHashList(self):\r
+        # GenPreMakefileHashList consumes the below dicts:\r
+ # gPlatformHashFile\r
+ # gPackageHashFile\r
+ # gModuleHashFile\r
+ # GenPreMakefileHashList produce no dict.\r
+ # gModuleHashFile items might be produced in multiprocessing, so\r
+ # need check gModuleHashFile remote dict\r
\r
# skip binary module\r
if self.IsBinaryModule:\r
return\r
\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
- self.GenModuleFilesHash(gDict)\r
-\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
- EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
- return\r
-\r
- # Initialze hash object\r
+ FileList = []\r
m = hashlib.md5()\r
-\r
# Add Platform level hash\r
- if ('PlatformHash') in gDict:\r
- m.update(gDict[('PlatformHash')].encode('utf-8'))\r
+ HashFile = GlobalData.gPlatformHashFile\r
+ if path.exists(LongFilePath(HashFile)):\r
+ FileList.append(HashFile)\r
+ m.update(HashFile.encode('utf-8'))\r
else:\r
- EdkLogger.quiet("[cache warning]: PlatformHash is missing")\r
+ EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)\r
\r
# Add Package level hash\r
if self.DependentPackageList:\r
for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
- if (Pkg.PackageName, 'PackageHash') in gDict:\r
- m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))\r
+ if not (Pkg.PackageName, Pkg.Arch) in GlobalData.gPackageHashFile:\r
+ EdkLogger.quiet("[cache warning]:No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))\r
+ continue\r
+ HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]\r
+ if path.exists(LongFilePath(HashFile)):\r
+ FileList.append(HashFile)\r
+ m.update(HashFile.encode('utf-8'))\r
else:\r
- EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg.PackageName, self.MetaFile.Name, self.Arch))\r
-\r
- # Add Library hash\r
- if self.LibraryAutoGenList:\r
- for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
- if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \\r
- not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:\r
- Lib.GenPreMakefileHash(gDict)\r
- m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)\r
+ EdkLogger.quiet("[cache warning]:No Package HashFile: %s" % HashFile)\r
\r
# Add Module self\r
- m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)\r
-\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.PreMakefileHashHexDigest = m.hexdigest()\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
-\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
-\r
- def GenMakeHeaderFilesHash(self, gDict):\r
- # Early exit if module or library has been hashed and is in memory\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
-\r
- # skip if the module cache already crashed\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return\r
-\r
- # skip binary module\r
- if self.IsBinaryModule:\r
- return\r
-\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:\r
- if self.IsLibrary:\r
- if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.libConstPcd:\r
- self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
- if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.Refes:\r
- self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
- self.CreateCodeFile()\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
- self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.Path, self.Arch),[]))\r
-\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
- EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
- return\r
-\r
- DependencyFileSet = set()\r
- # Add Makefile\r
- if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:\r
- DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)\r
+ # GenPreMakefileHashList needed in both --binary-destination\r
+ # and --hash. And --hash might save ModuleHashFile in remote dict\r
+ # during multiprocessing.\r
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:\r
+ HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]\r
else:\r
- EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
-\r
- # Add header files\r
- if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
- for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
- DependencyFileSet.add(File)\r
+ EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ if path.exists(LongFilePath(HashFile)):\r
+ FileList.append(HashFile)\r
+ m.update(HashFile.encode('utf-8'))\r
else:\r
- EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
-\r
- # Add AutoGen files\r
- if self.AutoGenFileList:\r
- for File in set(self.AutoGenFileList):\r
- DependencyFileSet.add(File)\r
-\r
- # Caculate all above dependency files hash\r
- # Initialze hash object\r
- FileList = []\r
- m = hashlib.md5()\r
- for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
- if not os.path.exists(str(File)):\r
- EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
- continue\r
- f = open(str(File), 'rb')\r
- Content = f.read()\r
- f.close()\r
- m.update(Content)\r
- FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
+ EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)\r
\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.AutoGenFileList = self.AutoGenFileList.keys()\r
- IR.MakeHeaderFilesHashChain = FileList\r
- IR.MakeHeaderFilesHashDigest = m.digest()\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ # Add Library hash\r
+ if self.LibraryAutoGenList:\r
+ for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):\r
\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
+ if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:\r
+ HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]\r
+                else:\r
+                    EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))\r
+                    continue\r
+ if path.exists(LongFilePath(HashFile)):\r
+ FileList.append(HashFile)\r
+ m.update(HashFile.encode('utf-8'))\r
+ else:\r
+ EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)\r
\r
- def GenMakeHash(self, gDict):\r
- # Early exit if module or library has been hashed and is in memory\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
+ # Save PreMakeHashFileList\r
+ FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())\r
+ try:\r
+ with open(LongFilePath(FilePath), 'w') as f:\r
+ json.dump(FileList, f, indent=0)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)\r
\r
- # skip if the module cache already crashed\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return\r
+ def GenMakefileHashList(self):\r
+        # GenMakefileHashList is only needed in --binary-destination, which keeps\r
+        # everything in the local dict. So there is no need to check the remote dict.\r
\r
# skip binary module\r
if self.IsBinaryModule:\r
return\r
\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
- self.GenModuleFilesHash(gDict)\r
- if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
- self.GenMakeHeaderFilesHash(gDict)\r
-\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:\r
- EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
- return\r
-\r
- # Initialze hash object\r
+ FileList = []\r
m = hashlib.md5()\r
- MakeHashChain = []\r
+ # Add AutoGen hash\r
+ HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]\r
+ if path.exists(LongFilePath(HashFile)):\r
+ FileList.append(HashFile)\r
+ m.update(HashFile.encode('utf-8'))\r
+ else:\r
+ EdkLogger.quiet("[cache warning]:No AutoGen HashFile: %s" % HashFile)\r
\r
- # Add hash of makefile and dependency header files\r
- m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)\r
- New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))\r
- New.sort(key=lambda x: str(x))\r
- MakeHashChain += New\r
+ # Add Module self\r
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:\r
+ HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]\r
+ else:\r
+ EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ if path.exists(LongFilePath(HashFile)):\r
+ FileList.append(HashFile)\r
+ m.update(HashFile.encode('utf-8'))\r
+ else:\r
+ EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)\r
\r
# Add Library hash\r
if self.LibraryAutoGenList:\r
- for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
- if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \\r
- not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:\r
- Lib.GenMakeHash(gDict)\r
- if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:\r
- print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)\r
- continue\r
- m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)\r
- New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))\r
- New.sort(key=lambda x: str(x))\r
- MakeHashChain += New\r
+ for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):\r
+ if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:\r
+ HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]\r
+                else:\r
+                    EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))\r
+                    continue\r
+ if path.exists(LongFilePath(HashFile)):\r
+ FileList.append(HashFile)\r
+ m.update(HashFile.encode('utf-8'))\r
+ else:\r
+ EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)\r
\r
- # Add Module self\r
- m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)\r
- New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))\r
- New.sort(key=lambda x: str(x))\r
- MakeHashChain += New\r
+ # Save MakeHashFileList\r
+ FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())\r
+ try:\r
+ with open(LongFilePath(FilePath), 'w') as f:\r
+ json.dump(FileList, f, indent=0)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)\r
+\r
+ def CheckHashChainFile(self, HashChainFile):\r
+ # Assume the HashChainFile basename format is the 'x.hashchain.16BytesHexStr'\r
+ # The x is module name and the 16BytesHexStr is md5 hexdigest of\r
+ # all hashchain files content\r
+ HashStr = HashChainFile.split('.')[-1]\r
+ if len(HashStr) != 32:\r
+            EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % (HashChainFile))\r
+ return False\r
\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.MakeHashDigest = m.digest()\r
- IR.MakeHashHexDigest = m.hexdigest()\r
- IR.MakeHashChain = MakeHashChain\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ try:\r
+ with open(LongFilePath(HashChainFile), 'r') as f:\r
+ HashChainList = json.load(f)\r
+ except:\r
+ EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)\r
+ return False\r
\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
+ # Print the different file info\r
+ # print(HashChainFile)\r
+ for idx, (SrcFile, SrcHash) in enumerate (HashChainList):\r
+ if SrcFile in GlobalData.gFileHashDict:\r
+ DestHash = GlobalData.gFileHashDict[SrcFile]\r
+ else:\r
+ try:\r
+ with open(LongFilePath(SrcFile), 'rb') as f:\r
+ Content = f.read()\r
+ DestHash = hashlib.md5(Content).hexdigest()\r
+ GlobalData.gFileHashDict[SrcFile] = DestHash\r
+ except IOError as X:\r
+ # cache miss if SrcFile is removed in new version code\r
+ GlobalData.gFileHashDict[SrcFile] = 0\r
+ EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))\r
+ return False\r
+ if SrcHash != DestHash:\r
+ EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))\r
+ return False\r
+\r
+ return True\r
\r
## Decide whether we can skip the left autogen and make process\r
- def CanSkipbyPreMakefileCache(self, gDict):\r
+ def CanSkipbyMakeCache(self):\r
+ # For --binary-source only\r
+        # CanSkipbyMakeCache consumes the dicts below:\r
+        #     gModuleMakeCacheStatus\r
+        #     gHashChainStatus\r
+        # GenPreMakefileHashList produces the gModuleMakeCacheStatus, gModuleHashFile dicts.\r
+        # All these dicts might be produced in multiprocessing, so we\r
+        # need to check these shared dicts\r
+\r
if not GlobalData.gBinCacheSource:\r
return False\r
\r
- if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:\r
- return True\r
-\r
- if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return False\r
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:\r
+ return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]\r
\r
- # If Module is binary, do not skip by cache\r
+ # If Module is binary, which has special build rule, do not skip by cache.\r
if self.IsBinaryModule:\r
+ print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
return False\r
\r
- # .inc is contains binary information so do not skip by hash as well\r
+ # see .inc as binary file, do not skip by hash\r
for f_ext in self.SourceFileList:\r
if '.inc' in str(f_ext):\r
+ print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
return False\r
\r
- # Get the module hash values from stored cache and currrent build\r
- # then check whether cache hit based on the hash values\r
- # if cache hit, restore all the files from cache\r
- FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
\r
ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
- ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
- if not os.path.exists(ModuleHashPair):\r
- EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.CacheCrash = True\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- return False\r
-\r
+ ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")\r
try:\r
- with open(ModuleHashPair, 'r') as f:\r
+ with open(LongFilePath(ModuleHashPair), 'r') as f:\r
ModuleHashPairList = json.load(f)\r
except:\r
+ # ModuleHashPair might not exist for new added module\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
+ print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)\r
return False\r
\r
- self.GenPreMakefileHash(gDict)\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
- EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
- return False\r
-\r
- MakeHashStr = None\r
- CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
+ # Check the PreMakeHash in ModuleHashPairList one by one\r
for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
- if PreMakefileHash == CurrentPreMakeHash:\r
- MakeHashStr = str(MakeHash)\r
+ SourceHashDir = path.join(ModuleCacheDir, MakeHash)\r
+ SourceFfsHashDir = path.join(FfsDir, MakeHash)\r
+ PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)\r
+ MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)\r
\r
- if not MakeHashStr:\r
- return False\r
+ try:\r
+ with open(LongFilePath(MakeHashFileList_FilePah), 'r') as f:\r
+ MakeHashFileList = json.load(f)\r
+ except:\r
+ EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePah)\r
+ continue\r
\r
- TargetHashDir = path.join(FileDir, MakeHashStr)\r
- TargetFfsHashDir = path.join(FfsDir, MakeHashStr)\r
+ HashMiss = False\r
+ for HashChainFile in MakeHashFileList:\r
+ HashChainStatus = None\r
+ if HashChainFile in GlobalData.gHashChainStatus:\r
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
+ if HashChainStatus == False:\r
+ HashMiss = True\r
+ break\r
+ elif HashChainStatus == True:\r
+ continue\r
+ # Convert to path start with cache source dir\r
+ RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)\r
+ NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)\r
+ if self.CheckHashChainFile(NewFilePath):\r
+ GlobalData.gHashChainStatus[HashChainFile] = True\r
+ # Save the module self HashFile for GenPreMakefileHashList later usage\r
+ if self.Name + ".hashchain." in HashChainFile:\r
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+ else:\r
+ GlobalData.gHashChainStatus[HashChainFile] = False\r
+ HashMiss = True\r
+ break\r
\r
- if not os.path.exists(TargetHashDir):\r
- EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)\r
- return False\r
+ if HashMiss:\r
+ continue\r
\r
- for root, dir, files in os.walk(TargetHashDir):\r
- for f in files:\r
- File = path.join(root, f)\r
- self.CacheCopyFile(self.OutputDir, TargetHashDir, File)\r
- if os.path.exists(TargetFfsHashDir):\r
- for root, dir, files in os.walk(TargetFfsHashDir):\r
+            # Make cache hit, restore the module build result\r
+ for root, dir, files in os.walk(SourceHashDir):\r
for f in files:\r
File = path.join(root, f)\r
- self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)\r
-\r
- if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
- CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+ self.CacheCopyFile(self.BuildDir, SourceHashDir, File)\r
+ if os.path.exists(SourceFfsHashDir):\r
+ for root, dir, files in os.walk(SourceFfsHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)\r
+\r
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+\r
+ print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
+ return True\r
\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.PreMakeCacheHit = True\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)\r
- #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return True\r
+ print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
\r
- ## Decide whether we can skip the make process\r
- def CanSkipbyMakeCache(self, gDict):\r
- if not GlobalData.gBinCacheSource:\r
+ ## Decide whether we can skip the left autogen and make process\r
+ def CanSkipbyPreMakeCache(self):\r
+        # CanSkipbyPreMakeCache consumes the dicts below:\r
+        #     gModulePreMakeCacheStatus\r
+        #     gHashChainStatus\r
+        #     gModuleHashFile\r
+        # GenPreMakefileHashList produces the gModulePreMakeCacheStatus dict.\r
+        # All these dicts might be produced in multiprocessing, so we\r
+        # need to check these shared dicts\r
+\r
+ if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:\r
return False\r
\r
- if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
- return True\r
-\r
- if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return False\r
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:\r
+ return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]\r
\r
- # If Module is binary, do not skip by cache\r
+ # If Module is binary, which has special build rule, do not skip by cache.\r
if self.IsBinaryModule:\r
- print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)\r
+ print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
return False\r
\r
- # .inc is contains binary information so do not skip by hash as well\r
+ # see .inc as binary file, do not skip by hash\r
for f_ext in self.SourceFileList:\r
if '.inc' in str(f_ext):\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.MakeCacheHit = False\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)\r
+ print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
return False\r
\r
- # Get the module hash values from stored cache and currrent build\r
- # then check whether cache hit based on the hash values\r
- # if cache hit, restore all the files from cache\r
- FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
- FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
-\r
- ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
- ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
- if not os.path.exists(ModuleHashPair):\r
- EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.CacheCrash = True\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- return False\r
-\r
- try:\r
- with open(ModuleHashPair, 'r') as f:\r
- ModuleHashPairList = json.load(f)\r
- except:\r
- EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
- return False\r
-\r
- self.GenMakeHash(gDict)\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
- EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
- return False\r
-\r
- MakeHashStr = None\r
- CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
- for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
- if MakeHash == CurrentMakeHash:\r
- MakeHashStr = str(MakeHash)\r
-\r
- if not MakeHashStr:\r
- print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)\r
- return False\r
-\r
- TargetHashDir = path.join(FileDir, MakeHashStr)\r
- TargetFfsHashDir = path.join(FfsDir, MakeHashStr)\r
- if not os.path.exists(TargetHashDir):\r
- EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)\r
- return False\r
-\r
- for root, dir, files in os.walk(TargetHashDir):\r
- for f in files:\r
- File = path.join(root, f)\r
- self.CacheCopyFile(self.OutputDir, TargetHashDir, File)\r
-\r
- if os.path.exists(TargetFfsHashDir):\r
- for root, dir, files in os.walk(TargetFfsHashDir):\r
- for f in files:\r
- File = path.join(root, f)\r
- self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)\r
-\r
- if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
- CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.MakeCacheHit = True\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)\r
- return True\r
-\r
- ## Show the first file name which causes cache miss\r
- def PrintFirstMakeCacheMissFile(self, gDict):\r
+ # For --hash only in the incremental build\r
if not GlobalData.gBinCacheSource:\r
- return\r
-\r
- # skip if the module cache already crashed\r
- if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return\r
-\r
- # skip binary module\r
- if self.IsBinaryModule:\r
- return\r
+ Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]\r
+ PreMakeHashFileList_FilePah = None\r
+ MakeTimeStamp = 0\r
+ # Find latest PreMakeHashFileList file in self.BuildDir folder\r
+ for File in Files:\r
+ if ".PreMakeHashFileList." in File:\r
+ FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]\r
+ if FileTimeStamp > MakeTimeStamp:\r
+ MakeTimeStamp = FileTimeStamp\r
+ PreMakeHashFileList_FilePah = File\r
+ if not PreMakeHashFileList_FilePah:\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
\r
- if not (self.MetaFile.Path, self.Arch) in gDict:\r
- return\r
+ try:\r
+ with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:\r
+ PreMakeHashFileList = json.load(f)\r
+ except:\r
+ EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)\r
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
\r
- # Only print cache miss file for the MakeCache not hit module\r
- if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
- return\r
+ HashMiss = False\r
+ for HashChainFile in PreMakeHashFileList:\r
+ HashChainStatus = None\r
+ if HashChainFile in GlobalData.gHashChainStatus:\r
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
+ if HashChainStatus == False:\r
+ HashMiss = True\r
+ break\r
+ elif HashChainStatus == True:\r
+ continue\r
+ if self.CheckHashChainFile(HashChainFile):\r
+ GlobalData.gHashChainStatus[HashChainFile] = True\r
+ # Save the module self HashFile for GenPreMakefileHashList later usage\r
+ if self.Name + ".hashchain." in HashChainFile:\r
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+ else:\r
+ GlobalData.gHashChainStatus[HashChainFile] = False\r
+ HashMiss = True\r
+ break\r
\r
- if not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:\r
- EdkLogger.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return\r
+ if HashMiss:\r
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
+ else:\r
+ print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
+ return True\r
\r
- # Find the cache dir name through the .ModuleHashPair file info\r
- FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
\r
ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
- ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
- if not os.path.exists(ModuleHashPair):\r
- EdkLogger.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return\r
-\r
+ ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")\r
try:\r
- with open(ModuleHashPair, 'r') as f:\r
+ with open(LongFilePath(ModuleHashPair), 'r') as f:\r
ModuleHashPairList = json.load(f)\r
except:\r
- EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return\r
+ # ModuleHashPair might not exist for new added module\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ return False\r
\r
- MakeHashSet = set()\r
+ # Check the PreMakeHash in ModuleHashPairList one by one\r
for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
- TargetHashDir = path.join(FileDir, str(MakeHash))\r
- if os.path.exists(TargetHashDir):\r
- MakeHashSet.add(MakeHash)\r
- if not MakeHashSet:\r
- EdkLogger.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return\r
+ SourceHashDir = path.join(ModuleCacheDir, MakeHash)\r
+ SourceFfsHashDir = path.join(FfsDir, MakeHash)\r
+ PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)\r
+ MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)\r
\r
- TargetHash = list(MakeHashSet)[0]\r
- TargetHashDir = path.join(FileDir, str(TargetHash))\r
- if len(MakeHashSet) > 1 :\r
- EdkLogger.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash, self.MetaFile.Path, self.Arch))\r
-\r
- ListFile = path.join(TargetHashDir, self.Name + '.MakeHashChain')\r
- if os.path.exists(ListFile):\r
try:\r
- f = open(ListFile, 'r')\r
- CachedList = json.load(f)\r
- f.close()\r
+ with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:\r
+ PreMakeHashFileList = json.load(f)\r
except:\r
- EdkLogger.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile)\r
- return\r
- else:\r
- EdkLogger.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile)\r
- return\r
-\r
- CurrentList = gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain\r
- for idx, (file, hash) in enumerate (CurrentList):\r
- (filecached, hashcached) = CachedList[idx]\r
- if file != filecached:\r
- EdkLogger.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self.MetaFile.Path, self.Arch, file, filecached))\r
- break\r
- if hash != hashcached:\r
- EdkLogger.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self.MetaFile.Path, self.Arch, file))\r
- break\r
-\r
- return True\r
+ EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)\r
+ continue\r
\r
- ## Decide whether we can skip the ModuleAutoGen process\r
- def CanSkipbyCache(self, gDict):\r
- # Hashing feature is off\r
- if not GlobalData.gBinCacheSource:\r
- return False\r
+ HashMiss = False\r
+ for HashChainFile in PreMakeHashFileList:\r
+ HashChainStatus = None\r
+ if HashChainFile in GlobalData.gHashChainStatus:\r
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
+ if HashChainStatus == False:\r
+ HashMiss = True\r
+ break\r
+ elif HashChainStatus == True:\r
+ continue\r
+ # Convert to path start with cache source dir\r
+ RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)\r
+ NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)\r
+ if self.CheckHashChainFile(NewFilePath):\r
+ GlobalData.gHashChainStatus[HashChainFile] = True\r
+ else:\r
+ GlobalData.gHashChainStatus[HashChainFile] = False\r
+ HashMiss = True\r
+ break\r
\r
- if self in GlobalData.gBuildHashSkipTracking:\r
- return GlobalData.gBuildHashSkipTracking[self]\r
+ if HashMiss:\r
+ continue\r
\r
- # If library or Module is binary do not skip by hash\r
- if self.IsBinaryModule:\r
- GlobalData.gBuildHashSkipTracking[self] = False\r
- return False\r
+ # PreMakefile cache hit, restore the module build result\r
+ for root, dir, files in os.walk(SourceHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.BuildDir, SourceHashDir, File)\r
+ if os.path.exists(SourceFfsHashDir):\r
+ for root, dir, files in os.walk(SourceFfsHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)\r
+\r
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+\r
+ print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
+ return True\r
\r
- # .inc is contains binary information so do not skip by hash as well\r
- for f_ext in self.SourceFileList:\r
- if '.inc' in str(f_ext):\r
- GlobalData.gBuildHashSkipTracking[self] = False\r
- return False\r
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
\r
- if not (self.MetaFile.Path, self.Arch) in gDict:\r
+ ## Decide whether we can skip the Module build\r
+ def CanSkipbyCache(self, gHitSet):\r
+ # Hashing feature is off\r
+ if not GlobalData.gBinCacheSource:\r
return False\r
\r
- if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:\r
- GlobalData.gBuildHashSkipTracking[self] = True\r
- return True\r
-\r
- if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
- GlobalData.gBuildHashSkipTracking[self] = True\r
+ if self in gHitSet:\r
return True\r
\r
return False\r
#\r
def AddDependency(self, Dependency):\r
for Dep in Dependency:\r
- if not Dep.BuildObject.IsBinaryModule and not Dep.BuildObject.CanSkipbyCache(GlobalData.gCacheIR):\r
+ if not Dep.BuildObject.IsBinaryModule and not Dep.BuildObject.CanSkipbyCache(GlobalData.gModuleCacheHit):\r
self.DependencyList.append(BuildTask.New(Dep)) # BuildTask list\r
\r
## The thread wrapper of LaunchCommand function\r
self.BuildItem.BuildObject.BuildTime = LaunchCommand(Command, WorkingDir,self.BuildItem.BuildObject)\r
self.CompleteFlag = True\r
\r
- # Run hash operation post dependency, to account for libs\r
- if GlobalData.gUseHashCache and self.BuildItem.BuildObject.IsLibrary:\r
- HashFile = path.join(self.BuildItem.BuildObject.BuildDir, self.BuildItem.BuildObject.Name + ".hash")\r
- SaveFileOnChange(HashFile, self.BuildItem.BuildObject.GenModuleHash(), True)\r
+ # Run hash operation post dependency to account for libs\r
+ # Run if --hash or --binary-destination\r
+ if GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:\r
+ self.BuildItem.BuildObject.GenModuleHash()\r
+ if GlobalData.gBinCacheDest:\r
+ self.BuildItem.BuildObject.GenCMakeHash()\r
+\r
except:\r
#\r
# TRICK: hide the output of threads left running, so that the user can\r
BuildTask._ErrorMessage = "%s broken\n %s [%s]" % \\r
(threading.currentThread().getName(), Command, WorkingDir)\r
\r
- # Set the value used by hash invalidation flow in GlobalData.gModuleBuildTracking to 'SUCCESS'\r
- # If Module or Lib is being tracked, it did not fail header check test, and built successfully\r
- if (self.BuildItem.BuildObject in GlobalData.gModuleBuildTracking and\r
- GlobalData.gModuleBuildTracking[self.BuildItem.BuildObject] != 'FAIL_METAFILE' and\r
- not BuildTask._ErrorFlag.isSet()\r
- ):\r
- GlobalData.gModuleBuildTracking[self.BuildItem.BuildObject] = 'SUCCESS'\r
-\r
# indicate there's a thread is available for another build task\r
BuildTask._RunningQueueLock.acquire()\r
BuildTask._RunningQueue.pop(self.BuildItem)\r
self.AutoGenMgr = None\r
EdkLogger.info("")\r
os.chdir(self.WorkspaceDir)\r
- GlobalData.gCacheIR = Manager().dict()\r
self.log_q = log_q\r
GlobalData.file_lock = mp.Lock()\r
- GlobalData.cache_lock = mp.Lock()\r
- def StartAutoGen(self,mqueue, DataPipe,SkipAutoGen,PcdMaList,share_data):\r
+ # Init cache data for local only\r
+ GlobalData.gPackageHashFile = dict()\r
+ GlobalData.gModulePreMakeCacheStatus = dict()\r
+ GlobalData.gModuleMakeCacheStatus = dict()\r
+ GlobalData.gHashChainStatus = dict()\r
+ GlobalData.gCMakeHashFile = dict()\r
+ GlobalData.gModuleHashFile = dict()\r
+ GlobalData.gFileHashDict = dict()\r
+ GlobalData.gModuleAllCacheStatus = set()\r
+ GlobalData.gModuleCacheHit = set()\r
+\r
+ def StartAutoGen(self,mqueue, DataPipe,SkipAutoGen,PcdMaList,cqueue):\r
try:\r
if SkipAutoGen:\r
return True,0\r
if FfsCmd is None:\r
FfsCmd = {}\r
GlobalData.FfsCmd = FfsCmd\r
- GlobalData.libConstPcd = DataPipe.Get("LibConstPcd")\r
- GlobalData.Refes = DataPipe.Get("REFS")\r
- auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,GlobalData.file_lock,GlobalData.cache_lock,share_data,self.log_q,error_event) for _ in range(self.ThreadNumber)]\r
+ auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,GlobalData.file_lock,cqueue,self.log_q,error_event) for _ in range(self.ThreadNumber)]\r
self.AutoGenMgr = AutoGenManager(auto_workers,feedback_q,error_event)\r
self.AutoGenMgr.start()\r
for w in auto_workers:\r
w.start()\r
if PcdMaList is not None:\r
for PcdMa in PcdMaList:\r
- if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:\r
- PcdMa.GenModuleFilesHash(share_data)\r
- PcdMa.GenPreMakefileHash(share_data)\r
- if PcdMa.CanSkipbyPreMakefileCache(share_data):\r
- continue\r
+ # SourceFileList calling sequence impact the makefile string sequence.\r
+ # Create cached SourceFileList here to unify its calling sequence for both\r
+ # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.\r
+ RetVal = PcdMa.SourceFileList\r
+ # Force cache miss for PCD driver\r
+ if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and self.Target in [None, "", "all"]:\r
+ cqueue.put((PcdMa.MetaFile.Path, PcdMa.Arch, "PreMakeCache", False))\r
\r
PcdMa.CreateCodeFile(False)\r
PcdMa.CreateMakeFile(False,GenFfsList = DataPipe.Get("FfsCommand").get((PcdMa.MetaFile.Path, PcdMa.Arch),[]))\r
\r
+ # Force cache miss for PCD driver\r
if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:\r
- PcdMa.GenMakeHeaderFilesHash(share_data)\r
- PcdMa.GenMakeHash(share_data)\r
- if PcdMa.CanSkipbyMakeCache(share_data):\r
- continue\r
+ cqueue.put((PcdMa.MetaFile.Path, PcdMa.Arch, "MakeCache", False))\r
\r
self.AutoGenMgr.join()\r
rt = self.AutoGenMgr.Status\r
EdkLogger.error("Postbuild", POSTBUILD_ERROR, 'Postbuild process is not success!')\r
EdkLogger.info("\n- Postbuild Done -\n")\r
\r
- ## Error handling for hash feature\r
- #\r
- # On BuildTask error, iterate through the Module Build tracking\r
- # dictionary to determine wheather a module failed to build. Invalidate\r
- # the hash associated with that module by removing it from storage.\r
- #\r
- #\r
- def invalidateHash(self):\r
- # Only for hashing feature\r
- if not GlobalData.gUseHashCache:\r
- return\r
-\r
- # GlobalData.gModuleBuildTracking contains only modules or libs that cannot be skipped by hash\r
- for Ma in GlobalData.gModuleBuildTracking:\r
- # Skip invalidating for Successful Module/Lib builds\r
- if GlobalData.gModuleBuildTracking[Ma] == 'SUCCESS':\r
- continue\r
-\r
- # The module failed to build, failed to start building, or failed the header check test from this point on\r
-\r
- # Remove .hash from build\r
- ModuleHashFile = os.path.join(Ma.BuildDir, Ma.Name + ".hash")\r
- if os.path.exists(ModuleHashFile):\r
- os.remove(ModuleHashFile)\r
-\r
- # Remove .hash file from cache\r
- if GlobalData.gBinCacheDest:\r
- FileDir = os.path.join(GlobalData.gBinCacheDest, Ma.PlatformInfo.OutputDir, Ma.BuildTarget + "_" + Ma.ToolChain, Ma.Arch, Ma.SourceDir, Ma.MetaFile.BaseName)\r
- HashFile = os.path.join(FileDir, Ma.Name + '.hash')\r
- if os.path.exists(HashFile):\r
- os.remove(HashFile)\r
-\r
## Build a module or platform\r
#\r
# Create autogen code and makefile for a module or platform, and the launch\r
self.Progress.Start("Generating makefile and code")\r
data_pipe_file = os.path.join(AutoGenObject.BuildDir, "GlobalVar_%s_%s.bin" % (str(AutoGenObject.Guid),AutoGenObject.Arch))\r
AutoGenObject.DataPipe.dump(data_pipe_file)\r
- autogen_rt,errorcode = self.StartAutoGen(mqueue, AutoGenObject.DataPipe, self.SkipAutoGen, PcdMaList, GlobalData.gCacheIR)\r
+ cqueue = mp.Queue()\r
+ autogen_rt,errorcode = self.StartAutoGen(mqueue, AutoGenObject.DataPipe, self.SkipAutoGen, PcdMaList, cqueue)\r
AutoGenIdFile = os.path.join(GlobalData.gConfDirectory,".AutoGenIdFile.txt")\r
with open(AutoGenIdFile,"w") as fw:\r
fw.write("Arch=%s\n" % "|".join((AutoGenObject.Workspace.ArchList)))\r
LaunchCommand(BuildCommand, AutoGenObject.MakeFileDir)\r
self.CreateAsBuiltInf()\r
if GlobalData.gBinCacheDest:\r
- self.UpdateBuildCache()\r
+ self.GenDestCache()\r
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:\r
+ # Only for --hash\r
+ # Update PreMakeCacheChain files\r
+ self.GenLocalPreMakeCache()\r
self.BuildModules = []\r
return True\r
\r
LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir,ModAutoGen)\r
self.CreateAsBuiltInf()\r
if GlobalData.gBinCacheDest:\r
- self.UpdateBuildCache()\r
+ self.GenDestCache()\r
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:\r
+ # Only for --hash\r
+ # Update PreMakeCacheChain files\r
+ self.GenLocalPreMakeCache()\r
self.BuildModules = []\r
return True\r
\r
AutoGenObject.BuildTime = LaunchCommand(BuildCommand, AutoGenObject.MakeFileDir)\r
self.CreateAsBuiltInf()\r
if GlobalData.gBinCacheDest:\r
- self.UpdateBuildCache()\r
+ self.GenDestCache()\r
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:\r
+ # Only for --hash\r
+ # Update PreMakeCacheChain files\r
+ self.GenLocalPreMakeCache()\r
self.BuildModules = []\r
return True\r
\r
if GlobalData.gEnableGenfdsMultiThread and self.Fdf:\r
CmdListDict = self._GenFfsCmd(Wa.ArchList)\r
\r
- # Add Platform and Package level hash in share_data for module hash calculation later\r
- if GlobalData.gBinCacheSource or GlobalData.gBinCacheDest:\r
- GlobalData.gCacheIR[('PlatformHash')] = GlobalData.gPlatformHash\r
- for PkgName in GlobalData.gPackageHash.keys():\r
- GlobalData.gCacheIR[(PkgName, 'PackageHash')] = GlobalData.gPackageHash[PkgName]\r
GlobalData.file_lock = mp.Lock()\r
- GlobalData.cache_lock = mp.Lock()\r
GlobalData.FfsCmd = CmdListDict\r
\r
self.Progress.Stop("done!")\r
AutoGenStart = time.time()\r
GlobalData.gGlobalDefines['ARCH'] = Arch\r
Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)\r
- GlobalData.libConstPcd = Pa.DataPipe.Get("LibConstPcd")\r
- GlobalData.Refes = Pa.DataPipe.Get("REFS")\r
for Module in Pa.Platform.Modules:\r
if self.ModuleFile.Dir == Module.Dir and self.ModuleFile.Name == Module.Name:\r
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)\r
Ma.Workspace = Wa\r
MaList.append(Ma)\r
\r
- if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:\r
- Ma.GenModuleFilesHash(GlobalData.gCacheIR)\r
- Ma.GenPreMakefileHash(GlobalData.gCacheIR)\r
- if Ma.CanSkipbyPreMakefileCache(GlobalData.gCacheIR):\r
- self.HashSkipModules.append(Ma)\r
- EdkLogger.quiet("cache hit: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))\r
+ if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and self.Target in [None, "", "all"]:\r
+ if Ma.CanSkipbyPreMakeCache():\r
continue\r
+ else:\r
+ self.PreMakeCacheMiss.add(Ma)\r
\r
# Not to auto-gen for targets 'clean', 'cleanlib', 'cleanall', 'run', 'fds'\r
if self.Target not in ['clean', 'cleanlib', 'cleanall', 'run', 'fds']:\r
return True\r
\r
if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:\r
- Ma.GenMakeHeaderFilesHash(GlobalData.gCacheIR)\r
- Ma.GenMakeHash(GlobalData.gCacheIR)\r
- if Ma.CanSkipbyMakeCache(GlobalData.gCacheIR):\r
- self.HashSkipModules.append(Ma)\r
- EdkLogger.quiet("cache hit: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))\r
+ if Ma.CanSkipbyMakeCache():\r
continue\r
else:\r
- EdkLogger.quiet("cache miss: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))\r
- Ma.PrintFirstMakeCacheMissFile(GlobalData.gCacheIR)\r
+ self.MakeCacheMiss.add(Ma)\r
\r
self.BuildModules.append(Ma)\r
- # Initialize all modules in tracking to 'FAIL'\r
- GlobalData.gModuleBuildTracking[Ma] = 'FAIL'\r
self.AutoGenTime += int(round((time.time() - AutoGenStart)))\r
MakeStart = time.time()\r
for Ma in self.BuildModules:\r
# we need a full version of makefile for platform\r
ExitFlag.set()\r
BuildTask.WaitForComplete()\r
- self.invalidateHash()\r
Pa.CreateMakeFile(False)\r
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)\r
# Start task scheduler\r
# in case there's an interruption. we need a full version of makefile for platform\r
Pa.CreateMakeFile(False)\r
if BuildTask.HasError():\r
- self.invalidateHash()\r
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)\r
self.MakeTime += int(round((time.time() - MakeStart)))\r
\r
BuildTask.WaitForComplete()\r
self.CreateAsBuiltInf()\r
if GlobalData.gBinCacheDest:\r
- self.UpdateBuildCache()\r
+ self.GenDestCache()\r
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:\r
+ # Only for --hash\r
+ # Update PreMakeCacheChain files\r
+ self.GenLocalPreMakeCache()\r
self.BuildModules = []\r
self.MakeTime += int(round((time.time() - MakeContiue)))\r
if BuildTask.HasError():\r
- self.invalidateHash()\r
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)\r
\r
self.BuildReport.AddPlatformReport(Wa, MaList)\r
# Save MAP buffer into MAP file.\r
#\r
self._SaveMapFile (MapBuffer, Wa)\r
- self.invalidateHash()\r
\r
def _GenFfsCmd(self,ArchList):\r
# convert dictionary of Cmd:(Inf,Arch)\r
self.BuildReport.AddPlatformReport(Wa)\r
Wa.CreateMakeFile(False)\r
\r
- # Add ffs build to makefile\r
+ # Add ffs build to makefile\r
CmdListDict = {}\r
if GlobalData.gEnableGenfdsMultiThread and self.Fdf:\r
CmdListDict = self._GenFfsCmd(Wa.ArchList)\r
\r
- # Add Platform and Package level hash in share_data for module hash calculation later\r
- if GlobalData.gBinCacheSource or GlobalData.gBinCacheDest:\r
- GlobalData.gCacheIR[('PlatformHash')] = GlobalData.gPlatformHash\r
- for PkgName in GlobalData.gPackageHash.keys():\r
- GlobalData.gCacheIR[(PkgName, 'PackageHash')] = GlobalData.gPackageHash[PkgName]\r
-\r
self.AutoGenTime += int(round((time.time() - WorkspaceAutoGenTime)))\r
BuildModules = []\r
- TotalModules = []\r
for Arch in Wa.ArchList:\r
PcdMaList = []\r
AutoGenStart = time.time()\r
ModuleList = []\r
for Inf in Pa.Platform.Modules:\r
ModuleList.append(Inf)\r
- # Add the INF only list in FDF\r
+ # Add the INF only list in FDF\r
if GlobalData.gFdfParser is not None:\r
for InfName in GlobalData.gFdfParser.Profile.InfList:\r
Inf = PathClass(NormPath(InfName), self.WorkspaceDir, Arch)\r
Pa.DataPipe.DataContainer = {"LibraryBuildDirectoryList":Pa.LibraryBuildDirectoryList}\r
Pa.DataPipe.DataContainer = {"ModuleBuildDirectoryList":Pa.ModuleBuildDirectoryList}\r
Pa.DataPipe.DataContainer = {"FdsCommandDict": Wa.GenFdsCommandDict}\r
+ # Prepare the cache share data for multiprocessing\r
+ Pa.DataPipe.DataContainer = {"gPlatformHashFile":GlobalData.gPlatformHashFile}\r
ModuleCodaFile = {}\r
for ma in Pa.ModuleAutoGenList:\r
ModuleCodaFile[(ma.MetaFile.File,ma.MetaFile.Root,ma.Arch,ma.MetaFile.Path)] = [item.Target for item in ma.CodaTargetList]\r
Pa.DataPipe.DataContainer = {"ModuleCodaFile":ModuleCodaFile}\r
+ # ModuleList contains all driver modules only\r
for Module in ModuleList:\r
- # Get ModuleAutoGen object to generate C code file and makefile\r
+ # Get ModuleAutoGen object to generate C code file and makefile\r
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)\r
-\r
if Ma is None:\r
continue\r
if Ma.PcdIsDriver:\r
Ma.PlatformInfo = Pa\r
Ma.Workspace = Wa\r
PcdMaList.append(Ma)\r
- TotalModules.append(Ma)\r
- # Initialize all modules in tracking to 'FAIL'\r
- GlobalData.gModuleBuildTracking[Ma] = 'FAIL'\r
-\r
+ self.AllDrivers.add(Ma)\r
+ self.AllModules.add(Ma)\r
\r
mqueue = mp.Queue()\r
+ cqueue = mp.Queue()\r
for m in Pa.GetAllModuleInfo:\r
mqueue.put(m)\r
+ module_file,module_root,module_path,module_basename,\\r
+ module_originalpath,module_arch,IsLib = m\r
+ Ma = ModuleAutoGen(Wa, PathClass(module_path, Wa), BuildTarget,\\r
+ ToolChain, Arch, self.PlatformFile,Pa.DataPipe)\r
+ self.AllModules.add(Ma)\r
data_pipe_file = os.path.join(Pa.BuildDir, "GlobalVar_%s_%s.bin" % (str(Pa.Guid),Pa.Arch))\r
Pa.DataPipe.dump(data_pipe_file)\r
\r
- autogen_rt, errorcode = self.StartAutoGen(mqueue, Pa.DataPipe, self.SkipAutoGen, PcdMaList,GlobalData.gCacheIR)\r
-\r
- # Skip cache hit modules\r
- if GlobalData.gBinCacheSource:\r
- for Ma in TotalModules:\r
- if (Ma.MetaFile.Path, Ma.Arch) in GlobalData.gCacheIR and \\r
- GlobalData.gCacheIR[(Ma.MetaFile.Path, Ma.Arch)].PreMakeCacheHit:\r
- self.HashSkipModules.append(Ma)\r
- continue\r
- if (Ma.MetaFile.Path, Ma.Arch) in GlobalData.gCacheIR and \\r
- GlobalData.gCacheIR[(Ma.MetaFile.Path, Ma.Arch)].MakeCacheHit:\r
- self.HashSkipModules.append(Ma)\r
- continue\r
- BuildModules.append(Ma)\r
- else:\r
- BuildModules.extend(TotalModules)\r
+ autogen_rt, errorcode = self.StartAutoGen(mqueue, Pa.DataPipe, self.SkipAutoGen, PcdMaList, cqueue)\r
\r
if not autogen_rt:\r
self.AutoGenMgr.TerminateWorkers()\r
self.AutoGenMgr.join(1)\r
raise FatalError(errorcode)\r
+\r
+ if GlobalData.gUseHashCache:\r
+ for item in GlobalData.gModuleAllCacheStatus:\r
+ (MetaFilePath, Arch, CacheStr, Status) = item\r
+ Ma = ModuleAutoGen(Wa, PathClass(MetaFilePath, Wa), BuildTarget,\\r
+ ToolChain, Arch, self.PlatformFile,Pa.DataPipe)\r
+ if CacheStr == "PreMakeCache" and Status == False:\r
+ self.PreMakeCacheMiss.add(Ma)\r
+ if CacheStr == "PreMakeCache" and Status == True:\r
+ self.PreMakeCacheHit.add(Ma)\r
+ GlobalData.gModuleCacheHit.add(Ma)\r
+ if CacheStr == "MakeCache" and Status == False:\r
+ self.MakeCacheMiss.add(Ma)\r
+ if CacheStr == "MakeCache" and Status == True:\r
+ self.MakeCacheHit.add(Ma)\r
+ GlobalData.gModuleCacheHit.add(Ma)\r
self.AutoGenTime += int(round((time.time() - AutoGenStart)))\r
AutoGenIdFile = os.path.join(GlobalData.gConfDirectory,".AutoGenIdFile.txt")\r
with open(AutoGenIdFile,"w") as fw:\r
fw.write("Arch=%s\n" % "|".join((Wa.ArchList)))\r
fw.write("BuildDir=%s\n" % Wa.BuildDir)\r
fw.write("PlatformGuid=%s\n" % str(Wa.AutoGenObjectList[0].Guid))\r
+\r
+ if GlobalData.gBinCacheSource:\r
+ BuildModules.extend(self.MakeCacheMiss)\r
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheDest:\r
+ BuildModules.extend(self.PreMakeCacheMiss)\r
+ else:\r
+ BuildModules.extend(self.AllDrivers)\r
+\r
self.Progress.Stop("done!")\r
return Wa, BuildModules\r
\r
GlobalData.gAutoGenPhase = False\r
\r
if GlobalData.gBinCacheSource:\r
- EdkLogger.quiet("Total cache hit driver num: %s, cache miss driver num: %s" % (len(set(self.HashSkipModules)), len(set(self.BuildModules))))\r
- CacheHitMa = set()\r
- CacheNotHitMa = set()\r
- for IR in GlobalData.gCacheIR.keys():\r
- if 'PlatformHash' in IR or 'PackageHash' in IR:\r
- continue\r
- if GlobalData.gCacheIR[IR].PreMakeCacheHit or GlobalData.gCacheIR[IR].MakeCacheHit:\r
- CacheHitMa.add(IR)\r
- else:\r
- # There might be binary module or module which has .inc files, not count for cache miss\r
- CacheNotHitMa.add(IR)\r
- EdkLogger.quiet("Total module num: %s, cache hit module num: %s" % (len(CacheHitMa)+len(CacheNotHitMa), len(CacheHitMa)))\r
+ EdkLogger.quiet("[cache Summary]: Total module num: %s" % len(self.AllModules))\r
+ EdkLogger.quiet("[cache Summary]: PreMakecache miss num: %s " % len(self.PreMakeCacheMiss))\r
+ EdkLogger.quiet("[cache Summary]: Makecache miss num: %s " % len(self.MakeCacheMiss))\r
\r
for Arch in Wa.ArchList:\r
MakeStart = time.time()\r
# we need a full version of makefile for platform\r
ExitFlag.set()\r
BuildTask.WaitForComplete()\r
- self.invalidateHash()\r
Pa.CreateMakeFile(False)\r
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)\r
# Start task scheduler\r
# in case there's an interruption. we need a full version of makefile for platform\r
\r
if BuildTask.HasError():\r
- self.invalidateHash()\r
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)\r
self.MakeTime += int(round((time.time() - MakeStart)))\r
\r
BuildTask.WaitForComplete()\r
self.CreateAsBuiltInf()\r
if GlobalData.gBinCacheDest:\r
- self.UpdateBuildCache()\r
+ self.GenDestCache()\r
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:\r
+ # Only for --hash\r
+ # Update PreMakeCacheChain files\r
+ self.GenLocalPreMakeCache()\r
#\r
# Get Module List\r
#\r
# has been signaled.\r
#\r
if BuildTask.HasError():\r
- self.invalidateHash()\r
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)\r
\r
# Create MAP file when Load Fix Address is enabled.\r
#\r
self._SaveMapFile(MapBuffer, Wa)\r
self.CreateGuidedSectionToolsFile(Wa)\r
- self.invalidateHash()\r
## Generate GuidedSectionTools.txt in the FV directories.\r
#\r
def CreateGuidedSectionToolsFile(self,Wa):\r
## Launch the module or platform build\r
#\r
def Launch(self):\r
+ self.AllDrivers = set()\r
+ self.AllModules = set()\r
+ self.PreMakeCacheMiss = set()\r
+ self.PreMakeCacheHit = set()\r
+ self.MakeCacheMiss = set()\r
+ self.MakeCacheHit = set()\r
if not self.ModuleFile:\r
if not self.SpawnMode or self.Target not in ["", "all"]:\r
self.SpawnMode = False\r
for Module in self.BuildModules:\r
Module.CreateAsBuiltInf()\r
\r
- def UpdateBuildCache(self):\r
- all_lib_set = set()\r
- all_mod_set = set()\r
- for Module in self.BuildModules:\r
+ def GenDestCache(self):\r
+ for Module in self.AllModules:\r
+ Module.GenPreMakefileHashList()\r
+ Module.GenMakefileHashList()\r
Module.CopyModuleToCache()\r
- all_mod_set.add(Module)\r
- for Module in self.HashSkipModules:\r
- Module.CopyModuleToCache()\r
- all_mod_set.add(Module)\r
- for Module in all_mod_set:\r
- for lib in Module.LibraryAutoGenList:\r
- all_lib_set.add(lib)\r
- for lib in all_lib_set:\r
- lib.CopyModuleToCache()\r
- all_lib_set.clear()\r
- all_mod_set.clear()\r
- self.HashSkipModules = []\r
+\r
+ def GenLocalPreMakeCache(self):\r
+ for Module in self.PreMakeCacheMiss:\r
+ Module.GenPreMakefileHashList()\r
+\r
## Do some clean-up works when error occurred\r
def Relinquish(self):\r
OldLogLevel = EdkLogger.GetLevel()\r