from .GenPcdDb import CreatePcdDatabaseCode\r
from Common.caching import cached_class_function\r
from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r
+from AutoGen.CacheIR import ModuleBuildCacheIR\r
+import json\r
+import tempfile\r
\r
## Mapping Makefile type\r
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}\r
EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))\r
GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)\r
\r
- self.Workspace = None\r
+ self.Workspace = Workspace\r
self.WorkspaceDir = ""\r
self.PlatformInfo = None\r
self.DataPipe = DataPipe\r
self.AutoGenDepSet = set()\r
self.ReferenceModules = []\r
self.ConstPcd = {}\r
+ self.Makefile = None\r
+ self.FileDependCache = {}\r
\r
def __init_platform_info__(self):\r
pinfo = self.DataPipe.Get("P_Info")\r
- self.Workspace = WorkSpaceInfo(pinfo.get("WorkspaceDir"),pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("ArchList"))\r
self.WorkspaceDir = pinfo.get("WorkspaceDir")\r
self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)\r
## hash() operator of ModuleAutoGen\r
ModuleNames = self.DataPipe.Get("M_Name")\r
if not ModuleNames:\r
return self.Name\r
- return ModuleNames.get(self.Name,self.Name)\r
+ return ModuleNames.get((self.Name,self.MetaFile),self.Name)\r
\r
# Macros could be used in build_rule.txt (also Makefile)\r
@cached_property\r
def BuildCommand(self):\r
return self.PlatformInfo.BuildCommand\r
\r
- ## Get object list of all packages the module and its dependent libraries belong to\r
+ ## Get the package list of the module and of the platform
+ #
+ # @retval list The list of package objects
+ #
+ @cached_property\r
+ def PackageList(self):\r
+ PkgList = []
+ if self.Module.Packages:
+ PkgList.extend(self.Module.Packages)
+ Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
+ for Package in Platform.Packages:
+ if Package in PkgList:
+ continue
+ PkgList.append(Package)
+ return PkgList
+\r
+ ## Get the object list of all packages the module, its dependent libraries, and the platform depend on
#\r
# @retval list The list of package object\r
#\r
@cached_property\r
def DerivedPackageList(self):\r
PackageList = []\r
- for M in [self.Module] + self.DependentLibraryList:\r
+ PackageList.extend(self.PackageList)\r
+ for M in self.DependentLibraryList:\r
for Package in M.Packages:\r
if Package in PackageList:\r
continue\r
self.Targets\r
return self._FileTypes\r
\r
- ## Get the list of package object the module depends on\r
+ ## Get the list of package objects the module and the platform depend on
#\r
# @retval list The package object list\r
#\r
@cached_property\r
def DependentPackageList(self):\r
- return self.Module.Packages\r
+ return self.PackageList\r
\r
## Return the list of auto-generated code file\r
#\r
RetVal.append(self.MetaFile.Dir)\r
RetVal.append(self.DebugDir)\r
\r
- for Package in self.Module.Packages:\r
+ for Package in self.PackageList:\r
PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
if PackageDir not in RetVal:\r
RetVal.append(PackageDir)\r
for Inc in IncludesList:\r
if Inc not in RetVal:\r
RetVal.append(str(Inc))\r
+ RetVal.extend(self.IncPathFromBuildOptions)\r
return RetVal\r
\r
+ @cached_property\r
+ def IncPathFromBuildOptions(self):\r
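+ # Collect the include paths passed through compiler FLAGS, e.g. "/I<path>" and
+ # "-I<path>", including the two-token "/I <path>" form; only existing paths are kept.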
+ IncPathList = []\r
+ for tool in self.BuildOption:\r
+ if 'FLAGS' in self.BuildOption[tool]:\r
+ flags = self.BuildOption[tool]['FLAGS']\r
+ whitespace = False\r
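+ # whitespace marks that the previous token was a bare "/I" or "-I",
+ # so the include path is expected in the next token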
+ for flag in flags.split(" "):\r
+ flag = flag.strip()\r
+ if flag.startswith(("/I","-I")):\r
+ if len(flag)>2:\r
+ if os.path.exists(flag[2:]):\r
+ IncPathList.append(flag[2:])\r
+ else:\r
+ whitespace = True\r
+ continue\r
+ if whitespace and flag:\r
+ if os.path.exists(flag):\r
+ IncPathList.append(flag)\r
+ whitespace = False\r
+ return IncPathList\r
+\r
@cached_property\r
def IncludePathLength(self):\r
return sum(len(inc)+1 for inc in self.IncludePathList)\r
\r
+ ## Get the list of include paths from the packages\r
+ #\r
+ # @retval list The list of package include paths
+ #\r
+ @cached_property\r
+ def PackageIncludePathList(self):\r
+ IncludesList = []
+ for Package in self.PackageList:
+ PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
+ PackageIncludes = Package.Includes
+ if Package._PrivateIncludes:
+ if not self.MetaFile.Path.startswith(PackageDir):
+ PackageIncludes = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
+ IncludesList.extend(PackageIncludes)
+ return IncludesList
+\r
## Get HII EX PCDs which maybe used by VFR\r
#\r
# efivarstore used by VFR may relate with HII EX PCDs\r
fStringIO.close ()\r
fInputfile.close ()\r
return OutputName\r
+\r
@cached_property\r
def OutputFile(self):\r
retVal = set()\r
- OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r
- DebugDir = self.DebugDir.replace('\\', '/').strip('/')\r
- for Item in self.CodaTargetList:\r
- File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')\r
- retVal.add(File)\r
- if self.DepexGenerated:\r
- retVal.add(self.Name + '.depex')\r
\r
- Bin = self._GenOffsetBin()\r
- if Bin:\r
- retVal.add(Bin)\r
+ for Root, Dirs, Files in os.walk(self.BuildDir):\r
+ for File in Files:\r
+ # skip intermediate object and debug files; everything else under BuildDir is a cacheable output
+ if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):\r
+ NewFile = path.join(Root, File)\r
+ retVal.add(NewFile)\r
\r
- for Root, Dirs, Files in os.walk(OutputDir):\r
+ for Root, Dirs, Files in os.walk(self.FfsOutputDir):\r
for File in Files:\r
- if File.lower().endswith('.pdb'):\r
- retVal.add(File)\r
+ NewFile = path.join(Root, File)\r
+ retVal.add(NewFile)\r
\r
return retVal\r
\r
\r
self.IsAsBuiltInfCreated = True\r
\r
+ def CacheCopyFile(self, DestDir, SourceDir, File):\r
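+ # Copy one file into DestDir, preserving its path relative to SourceDir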
+ sub_dir = os.path.relpath(File, SourceDir)\r
+ destination_file = os.path.join(DestDir, sub_dir)\r
+ destination_dir = os.path.dirname(destination_file)\r
+ CreateDirectory(destination_dir)\r
+ try:\r
+ CopyFileOnChange(File, destination_dir)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))\r
+ return\r
+\r
def CopyModuleToCache(self):\r
- FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.Name, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ self.GenPreMakefileHash(GlobalData.gCacheIR)\r
+ if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \\r
+ not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
+ EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return False\r
+\r
+ self.GenMakeHash(GlobalData.gCacheIR)\r
+ if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \\r
+ not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashChain or \\r
+ not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
+ EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return False\r
+\r
+ MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)\r
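+ # Module cache layout: <BinCacheDest>/<PlatformOutputDir>/<Target>_<ToolChain>/<Arch>/<SourceDir>/<ModuleBaseName>/<MakeHash>
+ # FFS cache layout:    <BinCacheDest>/<PlatformOutputDir>/<Target>_<ToolChain>/<TAB_FV_DIRECTORY>/Ffs/<Guid><ModuleName>/<MakeHash>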
+ FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName, MakeHashStr)\r
+ FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name, MakeHashStr)\r
+\r
CreateDirectory (FileDir)\r
- HashFile = path.join(self.BuildDir, self.Name + '.hash')\r
- if os.path.exists(HashFile):\r
- CopyFileOnChange(HashFile, FileDir)\r
+ self.SaveHashChainFileToCache(GlobalData.gCacheIR)\r
ModuleFile = path.join(self.OutputDir, self.Name + '.inf')\r
if os.path.exists(ModuleFile):\r
CopyFileOnChange(ModuleFile, FileDir)\r
Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
self.OutputFile = Ma.Binaries\r
for File in self.OutputFile:\r
- File = str(File)\r
- if not os.path.isabs(File):\r
- File = os.path.join(self.OutputDir, File)\r
if os.path.exists(File):\r
- sub_dir = os.path.relpath(File, self.OutputDir)\r
- destination_file = os.path.join(FileDir, sub_dir)\r
- destination_dir = os.path.dirname(destination_file)\r
- CreateDirectory(destination_dir)\r
- CopyFileOnChange(File, destination_dir)\r
+ if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):\r
+ self.CacheCopyFile(FfsDir, self.FfsOutputDir, File)\r
+ else:\r
+ self.CacheCopyFile(FileDir, self.OutputDir, File)\r
\r
- def AttemptModuleCacheCopy(self):\r
- # If library or Module is binary do not skip by hash\r
- if self.IsBinaryModule:\r
+ def SaveHashChainFileToCache(self, gDict):\r
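+ # Save the hash chain files (.ModuleHashPair, .MakeHashChain, .ModuleFilesChain)
+ # of this module into the binary cache so later builds can match against them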
+ if not GlobalData.gBinCacheDest:\r
+ return False\r
+\r
+ self.GenPreMakefileHash(gDict)\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
+ EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return False\r
+\r
+ self.GenMakeHash(gDict)\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
+ EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
return False\r
- # .inc is contains binary information so do not skip by hash as well\r
- for f_ext in self.SourceFileList:\r
- if '.inc' in str(f_ext):\r
- return False\r
- FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.Name, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
- HashFile = path.join(FileDir, self.Name + '.hash')\r
- if os.path.exists(HashFile):\r
- f = open(HashFile, 'r')\r
- CacheHash = f.read()\r
- f.close()\r
- self.GenModuleHash()\r
- if GlobalData.gModuleHash[self.Arch][self.Name]:\r
- if CacheHash == GlobalData.gModuleHash[self.Arch][self.Name]:\r
- for root, dir, files in os.walk(FileDir):\r
- for f in files:\r
- if self.Name + '.hash' in f:\r
- CopyFileOnChange(HashFile, self.BuildDir)\r
- else:\r
- File = path.join(root, f)\r
- sub_dir = os.path.relpath(File, FileDir)\r
- destination_file = os.path.join(self.OutputDir, sub_dir)\r
- destination_dir = os.path.dirname(destination_file)\r
- CreateDirectory(destination_dir)\r
- CopyFileOnChange(File, destination_dir)\r
- if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
- CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
- return True\r
- return False\r
+\r
+ # save the hash chain list as cache file\r
+ MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)\r
+ CacheDestDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ CacheHashDestDir = path.join(CacheDestDir, MakeHashStr)\r
+ ModuleHashPair = path.join(CacheDestDir, self.Name + ".ModuleHashPair")\r
+ MakeHashChain = path.join(CacheHashDestDir, self.Name + ".MakeHashChain")\r
+ ModuleFilesChain = path.join(CacheHashDestDir, self.Name + ".ModuleFilesChain")\r
+\r
+ # save the hash chain lists as json files
+ CreateDirectory (CacheDestDir)\r
+ CreateDirectory (CacheHashDestDir)\r
+ try:\r
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
+ if os.path.exists(ModuleHashPair):\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
+ PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
+ MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
+ ModuleHashPairList.append((PreMakeHash, MakeHash))\r
+ ModuleHashPairList = list(set(map(tuple, ModuleHashPairList)))\r
+ with open(ModuleHashPair, 'w') as f:\r
+ json.dump(ModuleHashPairList, f, indent=2)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair)\r
+ return False\r
+\r
+ try:\r
+ with open(MakeHashChain, 'w') as f:\r
+ json.dump(gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain, f, indent=2)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain)\r
+ return False\r
+\r
+ try:\r
+ with open(ModuleFilesChain, 'w') as f:\r
+ json.dump(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain, f, indent=2)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain)\r
+ return False\r
+\r
+ # save the AutoGen files and makefile for debugging
+ CacheDebugDir = path.join(CacheHashDestDir, "CacheDebug")\r
+ CreateDirectory (CacheDebugDir)\r
+ CopyFileOnChange(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath, CacheDebugDir)\r
+ if gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
+ for File in gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
+ CopyFileOnChange(str(File), CacheDebugDir)\r
+\r
+ return True\r
\r
## Create makefile for the module and its dependent libraries\r
#\r
#\r
@cached_class_function\r
def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r
+ gDict = GlobalData.gCacheIR\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
+ return\r
+\r
# nest this function inside its only caller.
def CreateTimeStamp():\r
FileSet = {self.MetaFile.Path}\r
\r
if os.path.exists (self.TimeStampPath):\r
os.remove (self.TimeStampPath)\r
- with open(self.TimeStampPath, 'w+') as fd:\r
- for f in FileSet:\r
- fd.write(f)\r
- fd.write("\n")\r
+\r
+ SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)\r
\r
# Ignore generating makefile when it is a binary module\r
if self.IsBinaryModule:\r
for LibraryAutoGen in self.LibraryAutoGenList:\r
LibraryAutoGen.CreateMakeFile()\r
\r
- # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping\r
- if not GlobalData.gUseHashCache and self.CanSkip():\r
+ # CanSkip uses timestamps to determine build skipping\r
+ if self.CanSkip():\r
return\r
\r
if len(self.CustomMakefile) == 0:\r
\r
CreateTimeStamp()\r
\r
+ MakefileType = Makefile._FileType\r
+ MakefileName = Makefile._FILE_NAME_[MakefileType]\r
+ MakefilePath = os.path.join(self.MakeFileDir, MakefileName)\r
+\r
+ MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
+ MewIR.MakefilePath = MakefilePath\r
+ MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
+ MewIR.CreateMakeFileDone = True\r
+ with GlobalData.cache_lock:\r
+ try:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.MakefilePath = MakefilePath\r
+ IR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
+ IR.CreateMakeFileDone = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ except:\r
+ gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
+\r
def CopyBinaryFiles(self):\r
for File in self.Module.Binaries:\r
SrcPath = File.Path\r
# dependent libraries will be created\r
#\r
def CreateCodeFile(self, CreateLibraryCodeFile=True):\r
+ gDict = GlobalData.gCacheIR\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:\r
+ return\r
+\r
if self.IsCodeFileCreated:\r
return\r
\r
if not self.IsLibrary and CreateLibraryCodeFile:\r
for LibraryAutoGen in self.LibraryAutoGenList:\r
LibraryAutoGen.CreateCodeFile()\r
- # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping\r
- if not GlobalData.gUseHashCache and self.CanSkip():\r
- return\r
\r
+ # CanSkip uses timestamps to determine build skipping\r
+ if self.CanSkip():\r
+ return\r
+ self.LibraryAutoGenList\r
AutoGenList = []\r
IgoredAutoGenList = []\r
\r
(" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))\r
\r
self.IsCodeFileCreated = True\r
+ MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
+ MewIR.CreateCodeFileDone = True\r
+ with GlobalData.cache_lock:\r
+ try:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.CreateCodeFileDone = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ except:\r
+ gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
+\r
return AutoGenList\r
\r
## Summarize the ModuleAutoGen objects of all libraries used by this module\r
m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
\r
# Add Module self\r
- f = open(str(self.MetaFile), 'rb')\r
- Content = f.read()\r
- f.close()\r
+ with open(str(self.MetaFile), 'rb') as f:\r
+ Content = f.read()\r
m.update(Content)\r
\r
# Add Module's source files\r
\r
return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
\r
+ def GenModuleFilesHash(self, gDict):\r
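+ # Hash the module meta file, its source files and all headers they include;
+ # the result is stored in the cache IR as ModuleFilesHashDigest/ModuleFilesChain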
+ # Early exit if module or library has been hashed and is in memory\r
+ if (self.MetaFile.Path, self.Arch) in gDict:\r
+ if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ # skip if the module cache already crashed\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
+ DependencyFileSet = set()\r
+ # Add Module Meta file\r
+ DependencyFileSet.add(self.MetaFile)\r
+\r
+ # Add Module's source files\r
+ if self.SourceFileList:\r
+ for File in set(self.SourceFileList):\r
+ DependencyFileSet.add(File)\r
+\r
+ # Add module's include header files
+ # Search dependency file list for each source file\r
+ SourceFileList = []\r
+ OutPutFileList = []\r
+ for Target in self.IntroTargetList:\r
+ SourceFileList.extend(Target.Inputs)\r
+ OutPutFileList.extend(Target.Outputs)\r
+ if OutPutFileList:\r
+ for Item in OutPutFileList:\r
+ if Item in SourceFileList:\r
+ SourceFileList.remove(Item)\r
+ SearchList = []\r
+ for file_path in self.IncludePathList + self.BuildOptionIncPathList:\r
+ # skip the folders in the platform BuildDir which have not been generated yet
+ if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):\r
+ continue\r
+ SearchList.append(file_path)\r
+ FileDependencyDict = {}\r
+ ForceIncludedFile = []\r
+ for F in SourceFileList:\r
+ # skip the files which have not been generated yet, because
+ # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c\r
+ if not os.path.exists(F.Path):\r
+ continue\r
+ FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)\r
+\r
+ if FileDependencyDict:\r
+ for Dependency in FileDependencyDict.values():\r
+ DependencyFileSet.update(set(Dependency))\r
+\r
+ # Calculate the hash of all the dependency files above
+ # Initialize hash object
+ FileList = []\r
+ m = hashlib.md5()\r
+ for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
+ if not os.path.exists(str(File)):\r
+ EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
+ continue\r
+ with open(str(File), 'rb') as f:\r
+ Content = f.read()\r
+ m.update(Content)\r
+ FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
+\r
+\r
+ MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
+ MewIR.ModuleFilesHashDigest = m.digest()\r
+ MewIR.ModuleFilesHashHexDigest = m.hexdigest()\r
+ MewIR.ModuleFilesChain = FileList\r
+ with GlobalData.cache_lock:\r
+ try:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.ModuleFilesHashDigest = m.digest()\r
+ IR.ModuleFilesHashHexDigest = m.hexdigest()\r
+ IR.ModuleFilesChain = FileList\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ except:\r
+ gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
+\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ def GenPreMakefileHash(self, gDict):\r
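+ # The pre-makefile hash combines the platform hash, the dependent package hashes,
+ # the dependent library module files hashes and this module's own files hash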
+ # Early exit if module or library has been hashed and is in memory\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ # skip if the module cache already crashed\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
+ # skip binary module\r
+ if self.IsBinaryModule:\r
+ return\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
+ self.GenModuleFilesHash(gDict)\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
+ EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+ return\r
+\r
+ # Initialize hash object
+ m = hashlib.md5()\r
+\r
+ # Add Platform level hash\r
+ if 'PlatformHash' in gDict:
+ m.update(gDict['PlatformHash'].encode('utf-8'))
+ else:\r
+ EdkLogger.quiet("[cache warning]: PlatformHash is missing")\r
+\r
+ # Add Package level hash\r
+ if self.DependentPackageList:\r
+ for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
+ if (Pkg.PackageName, 'PackageHash') in gDict:\r
+ m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))\r
+ else:\r
+ EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg.PackageName, self.MetaFile.Name, self.Arch))\r
+\r
+ # Add Library hash\r
+ if self.LibraryAutoGenList:\r
+ for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
+ if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \\r
+ not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:\r
+ Lib.GenPreMakefileHash(gDict)\r
+ m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)\r
+\r
+ # Add Module self\r
+ m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)\r
+\r
+ with GlobalData.cache_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.PreMakefileHashHexDigest = m.hexdigest()\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ def GenMakeHeaderFilesHash(self, gDict):\r
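+ # Hash the generated makefile, the dependency header files and the AutoGen files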
+ # Early exit if module or library has been hashed and is in memory\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ # skip if the module cache already crashed\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
+ # skip binary module\r
+ if self.IsBinaryModule:\r
+ return\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:\r
+ if self.IsLibrary:\r
+ if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.libConstPcd:\r
+ self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
+ if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.Refes:\r
+ self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
+ self.CreateCodeFile()\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
+ self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.Path, self.Arch),[]))\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
+ EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+ return\r
+\r
+ DependencyFileSet = set()\r
+ # Add Makefile\r
+ if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:\r
+ DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)\r
+ else:\r
+ EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+\r
+ # Add header files\r
+ if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
+ for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
+ DependencyFileSet.add(File)\r
+ else:\r
+ EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+\r
+ # Add AutoGen files\r
+ if self.AutoGenFileList:\r
+ for File in set(self.AutoGenFileList):\r
+ DependencyFileSet.add(File)\r
+\r
+ # Calculate the hash of all the dependency files above
+ # Initialize hash object
+ FileList = []\r
+ m = hashlib.md5()\r
+ for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
+ if not os.path.exists(str(File)):\r
+ EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
+ continue\r
+ with open(str(File), 'rb') as f:
+ Content = f.read()
+ m.update(Content)\r
+ FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
+\r
+ with GlobalData.cache_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.AutoGenFileList = self.AutoGenFileList.keys()\r
+ IR.MakeHeaderFilesHashChain = FileList\r
+ IR.MakeHeaderFilesHashDigest = m.digest()\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ def GenMakeHash(self, gDict):\r
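+ # The make hash combines the makefile/header files hash, the library make hashes
+ # and this module's files hash; MakeHashChain records every (file, hash) pair used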
+ # Early exit if module or library has been hashed and is in memory\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ # skip if the module cache already crashed\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
+ # skip binary module\r
+ if self.IsBinaryModule:\r
+ return\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
+ self.GenModuleFilesHash(gDict)\r
+ if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
+ self.GenMakeHeaderFilesHash(gDict)\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:\r
+ EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+ return\r
+\r
+ # Initialize hash object
+ m = hashlib.md5()\r
+ MakeHashChain = []\r
+\r
+ # Add hash of makefile and dependency header files\r
+ m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)\r
+ New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))\r
+ New.sort(key=lambda x: str(x))\r
+ MakeHashChain += New\r
+\r
+ # Add Library hash\r
+ if self.LibraryAutoGenList:\r
+ for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
+ if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \\r
+ not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:\r
+ Lib.GenMakeHash(gDict)\r
+ if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:\r
+ print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)\r
+ continue\r
+ m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)\r
+ New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))\r
+ New.sort(key=lambda x: str(x))\r
+ MakeHashChain += New\r
+\r
+ # Add Module self\r
+ m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)\r
+ New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))\r
+ New.sort(key=lambda x: str(x))\r
+ MakeHashChain += New\r
+\r
+ with GlobalData.cache_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.MakeHashDigest = m.digest()\r
+ IR.MakeHashHexDigest = m.hexdigest()\r
+ IR.MakeHashChain = MakeHashChain\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ ## Decide whether we can skip the remaining autogen and make process
+ def CanSkipbyPreMakefileCache(self, gDict):\r
+ if not GlobalData.gBinCacheSource:\r
+ return False\r
+\r
+ if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:\r
+ return True\r
+\r
+ if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return False\r
+\r
+ # If Module is binary, do not skip by cache\r
+ if self.IsBinaryModule:\r
+ return False\r
+\r
+ # .inc files contain binary information, so do not skip by hash either
+ for f_ext in self.SourceFileList:\r
+ if '.inc' in str(f_ext):\r
+ return False\r
+\r
+ # Get the module hash values from the stored cache and the current build
+ # then check whether cache hit based on the hash values\r
+ # if cache hit, restore all the files from cache\r
+ FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
+\r
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
+ ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
+ if not os.path.exists(ModuleHashPair):\r
+ EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
+ with GlobalData.cache_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.CacheCrash = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ return False\r
+\r
+ try:\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
+ return False\r
+\r
+ self.GenPreMakefileHash(gDict)\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
+ EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+ return False\r
+\r
+ MakeHashStr = None\r
+ CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
+ for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
+ if PreMakefileHash == CurrentPreMakeHash:\r
+ MakeHashStr = str(MakeHash)\r
+\r
+ if not MakeHashStr:\r
+ return False\r
+\r
+ TargetHashDir = path.join(FileDir, MakeHashStr)\r
+ TargetFfsHashDir = path.join(FfsDir, MakeHashStr)\r
+\r
+ if not os.path.exists(TargetHashDir):\r
+ EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)\r
+ return False\r
+\r
+ for root, dir, files in os.walk(TargetHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.OutputDir, TargetHashDir, File)\r
+ if os.path.exists(TargetFfsHashDir):\r
+ for root, dir, files in os.walk(TargetFfsHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)\r
+\r
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+\r
+ with GlobalData.cache_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.PreMakeCacheHit = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)\r
+ #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return True\r
+\r
+ ## Decide whether we can skip the make process\r
+ def CanSkipbyMakeCache(self, gDict):\r
+ if not GlobalData.gBinCacheSource:\r
+ return False\r
+\r
+ if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
+ return True\r
+\r
+ if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return False\r
+\r
+ # If Module is binary, do not skip by cache\r
+ if self.IsBinaryModule:\r
+ print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)\r
+ return False\r
+\r
+ # .inc files contain binary information, so do not skip by hash either
+ for f_ext in self.SourceFileList:\r
+ if '.inc' in str(f_ext):\r
+ with GlobalData.cache_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.MakeCacheHit = False\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)\r
+ return False\r
+\r
+ # Get the module hash values from the stored cache and the current build
+ # then check whether cache hit based on the hash values\r
+ # if cache hit, restore all the files from cache\r
+ FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
+\r
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
+ ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
+ if not os.path.exists(ModuleHashPair):\r
+ EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
+ with GlobalData.cache_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.CacheCrash = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ return False\r
+\r
+ try:\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
+ return False\r
+\r
+ self.GenMakeHash(gDict)\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
+ EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+ return False\r
+\r
+ MakeHashStr = None\r
+ CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
+ for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
+ if MakeHash == CurrentMakeHash:\r
+ MakeHashStr = str(MakeHash)\r
+\r
+ if not MakeHashStr:\r
+ print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)\r
+ return False\r
+\r
+ TargetHashDir = path.join(FileDir, MakeHashStr)\r
+ TargetFfsHashDir = path.join(FfsDir, MakeHashStr)\r
+ if not os.path.exists(TargetHashDir):\r
+ EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)\r
+ return False\r
+\r
+ for root, dir, files in os.walk(TargetHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.OutputDir, TargetHashDir, File)\r
+\r
+ if os.path.exists(TargetFfsHashDir):\r
+ for root, dir, files in os.walk(TargetFfsHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)\r
+\r
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+ with GlobalData.cache_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.MakeCacheHit = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)\r
+ return True\r
+\r
+ ## Show the first file name that causes a cache miss
+ def PrintFirstMakeCacheMissFile(self, gDict):\r
+ if not GlobalData.gBinCacheSource:\r
+ return\r
+\r
+ # skip if the module cache already crashed\r
+ if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
+ # skip binary module\r
+ if self.IsBinaryModule:\r
+ return\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict:\r
+ return\r
+\r
+ # Only print cache miss file for the MakeCache not hit module\r
+ if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
+ return\r
+\r
+ if not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:\r
+ EdkLogger.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return\r
+\r
+ # Find the cache dir name through the .ModuleHashPair file info\r
+ FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+\r
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
+ ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
+ if not os.path.exists(ModuleHashPair):\r
+ EdkLogger.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return\r
+\r
+ try:\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
+ except:\r
+ EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return\r
+\r
+ MakeHashSet = set()\r
+ for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
+ TargetHashDir = path.join(FileDir, str(MakeHash))\r
+ if os.path.exists(TargetHashDir):\r
+ MakeHashSet.add(MakeHash)\r
+ if not MakeHashSet:\r
+ EdkLogger.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return\r
+\r
+ TargetHash = list(MakeHashSet)[0]\r
+ TargetHashDir = path.join(FileDir, str(TargetHash))\r
+ if len(MakeHashSet) > 1 :\r
+ EdkLogger.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash, self.MetaFile.Path, self.Arch))\r
+\r
+ ListFile = path.join(TargetHashDir, self.Name + '.MakeHashChain')\r
+ if os.path.exists(ListFile):\r
+ try:\r
+ with open(ListFile, 'r') as f:
+ CachedList = json.load(f)
+ except:\r
+ EdkLogger.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile)\r
+ return\r
+ else:\r
+ EdkLogger.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile)\r
+ return\r
+\r
+ CurrentList = gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain\r
+ for idx, (file, hash) in enumerate (CurrentList):\r
+ (filecached, hashcached) = CachedList[idx]\r
+ if file != filecached:\r
+ EdkLogger.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self.MetaFile.Path, self.Arch, file, filecached))\r
+ break\r
+ if hash != hashcached:\r
+ EdkLogger.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self.MetaFile.Path, self.Arch, file))\r
+ break\r
+\r
+ return True\r
+\r
## Decide whether we can skip the ModuleAutoGen process\r
- def CanSkipbyHash(self):\r
+ def CanSkipbyCache(self, gDict):\r
# Hashing feature is off\r
- if not GlobalData.gUseHashCache:\r
+ if not GlobalData.gBinCacheSource:\r
return False\r
\r
- # Initialize a dictionary for each arch type\r
- if self.Arch not in GlobalData.gBuildHashSkipTracking:\r
- GlobalData.gBuildHashSkipTracking[self.Arch] = dict()\r
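+ # the skip decision is cached per ModuleAutoGen instance in gBuildHashSkipTracking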
+ if self in GlobalData.gBuildHashSkipTracking:\r
+ return GlobalData.gBuildHashSkipTracking[self]\r
\r
# If library or Module is binary do not skip by hash\r
if self.IsBinaryModule:\r
+ GlobalData.gBuildHashSkipTracking[self] = False\r
return False\r
\r
# .inc files contain binary information so do not skip by hash either
for f_ext in self.SourceFileList:\r
if '.inc' in str(f_ext):\r
+ GlobalData.gBuildHashSkipTracking[self] = False\r
return False\r
\r
- # Use Cache, if exists and if Module has a copy in cache\r
- if GlobalData.gBinCacheSource and self.AttemptModuleCacheCopy():\r
+ if not (self.MetaFile.Path, self.Arch) in gDict:\r
+ return False\r
+\r
+ if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:\r
+ GlobalData.gBuildHashSkipTracking[self] = True\r
return True\r
\r
- # Early exit for libraries that haven't yet finished building\r
- HashFile = path.join(self.BuildDir, self.Name + ".hash")\r
- if self.IsLibrary and not os.path.exists(HashFile):\r
- return False\r
+ if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
+ GlobalData.gBuildHashSkipTracking[self] = True\r
+ return True\r
\r
- # Return a Boolean based on if can skip by hash, either from memory or from IO.\r
- if self.Name not in GlobalData.gBuildHashSkipTracking[self.Arch]:\r
- # If hashes are the same, SaveFileOnChange() will return False.\r
- GlobalData.gBuildHashSkipTracking[self.Arch][self.Name] = not SaveFileOnChange(HashFile, self.GenModuleHash(), True)\r
- return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]\r
- else:\r
- return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]\r
+ return False\r
\r
## Decide whether we can skip the ModuleAutoGen process\r
# If any source file is newer than the module then we cannot skip
#\r
def CanSkip(self):\r
+ # Don't skip if cache feature enabled\r
+ if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:\r
+ return False\r
if self.MakeFileDir in GlobalData.gSikpAutoGenCache:\r
return True\r
if not os.path.exists(self.TimeStampPath):\r