#\r
from __future__ import absolute_import\r
from AutoGen.AutoGen import AutoGen\r
-from Common.LongFilePathSupport import CopyLongFilePath\r
+from Common.LongFilePathSupport import LongFilePath, CopyLongFilePath\r
from Common.BuildToolError import *\r
from Common.DataType import *\r
from Common.Misc import *\r
from .GenPcdDb import CreatePcdDatabaseCode\r
from Common.caching import cached_class_function\r
from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r
-from AutoGen.CacheIR import ModuleBuildCacheIR\r
import json\r
import tempfile\r
\r
def BuildCommand(self):\r
return self.PlatformInfo.BuildCommand\r
\r
- ## Get object list of all packages the module and its dependent libraries belong to\r
+ ## Get Module package and Platform package\r
+ #\r
+ # @retval list The list of package object\r
+ #\r
+ @cached_property\r
+ def PackageList(self):\r
+ PkagList = []\r
+ if self.Module.Packages:\r
+ PkagList.extend(self.Module.Packages)\r
+ Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
+ for Package in Platform.Packages:\r
+ if Package in PkagList:\r
+ continue\r
+ PkagList.append(Package)\r
+ return PkagList\r
+\r
+ ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on\r
#\r
# @retval list The list of package object\r
#\r
@cached_property\r
def DerivedPackageList(self):\r
PackageList = []\r
- for M in [self.Module] + self.DependentLibraryList:\r
+ PackageList.extend(self.PackageList)\r
+ for M in self.DependentLibraryList:\r
for Package in M.Packages:\r
if Package in PackageList:\r
continue\r
self.Targets\r
return self._FileTypes\r
\r
- ## Get the list of package object the module depends on\r
+ ## Get the list of package object the module depends on and the Platform depends on\r
#\r
# @retval list The package object list\r
#\r
@cached_property\r
def DependentPackageList(self):\r
- return self.Module.Packages\r
+ return self.PackageList\r
\r
## Return the list of auto-generated code file\r
#\r
RetVal.append(self.MetaFile.Dir)\r
RetVal.append(self.DebugDir)\r
\r
- for Package in self.Module.Packages:\r
+ for Package in self.PackageList:\r
PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
if PackageDir not in RetVal:\r
RetVal.append(PackageDir)\r
for Inc in IncludesList:\r
if Inc not in RetVal:\r
RetVal.append(str(Inc))\r
+ RetVal.extend(self.IncPathFromBuildOptions)\r
return RetVal\r
\r
+ @cached_property\r
+ def IncPathFromBuildOptions(self):\r
+ IncPathList = []\r
+ for tool in self.BuildOption:\r
+ if 'FLAGS' in self.BuildOption[tool]:\r
+ flags = self.BuildOption[tool]['FLAGS']\r
+ whitespace = False\r
+ for flag in flags.split(" "):\r
+ flag = flag.strip()\r
+ if flag.startswith(("/I","-I")):\r
+ if len(flag)>2:\r
+ if os.path.exists(flag[2:]):\r
+ IncPathList.append(flag[2:])\r
+ else:\r
+ whitespace = True\r
+ continue\r
+ if whitespace and flag:\r
+ if os.path.exists(flag):\r
+ IncPathList.append(flag)\r
+ whitespace = False\r
+ return IncPathList\r
+\r
@cached_property\r
def IncludePathLength(self):\r
return sum(len(inc)+1 for inc in self.IncludePathList)\r
@cached_property\r
def PackageIncludePathList(self):\r
IncludesList = []\r
- for Package in self.Module.Packages:\r
+ for Package in self.PackageList:\r
PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
IncludesList = Package.Includes\r
if Package._PrivateIncludes:\r
def OutputFile(self):\r
retVal = set()\r
\r
- OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r
- DebugDir = self.DebugDir.replace('\\', '/').strip('/')\r
- for Item in self.CodaTargetList:\r
- File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')\r
- NewFile = path.join(self.OutputDir, File)\r
- retVal.add(NewFile)\r
-\r
- Bin = self._GenOffsetBin()\r
- if Bin:\r
- NewFile = path.join(self.OutputDir, Bin)\r
- retVal.add(NewFile)\r
-\r
- for Root, Dirs, Files in os.walk(self.OutputDir):\r
+ for Root, Dirs, Files in os.walk(self.BuildDir):\r
for File in Files:\r
# lib file is already added through above CodaTargetList, skip it here\r
- if not (File.lower().endswith('.obj') or File.lower().endswith('.lib')):\r
- NewFile = path.join(self.OutputDir, File)\r
+ if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):\r
+ NewFile = path.join(Root, File)\r
retVal.add(NewFile)\r
\r
for Root, Dirs, Files in os.walk(self.FfsOutputDir):\r
for File in Files:\r
- NewFile = path.join(self.FfsOutputDir, File)\r
+ NewFile = path.join(Root, File)\r
retVal.add(NewFile)\r
\r
return retVal\r
\r
self.IsAsBuiltInfCreated = True\r
\r
- def CacheCopyFile(self, OriginDir, CopyDir, File):\r
- sub_dir = os.path.relpath(File, CopyDir)\r
- destination_file = os.path.join(OriginDir, sub_dir)\r
+ def CacheCopyFile(self, DestDir, SourceDir, File):\r
+ if os.path.isdir(File):\r
+ return\r
+\r
+ sub_dir = os.path.relpath(File, SourceDir)\r
+ destination_file = os.path.join(DestDir, sub_dir)\r
destination_dir = os.path.dirname(destination_file)\r
CreateDirectory(destination_dir)\r
try:\r
return\r
\r
def CopyModuleToCache(self):\r
- self.GenPreMakefileHash(GlobalData.gCacheIR)\r
- if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \\r
- not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
- EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return False\r
+ # Find the MakeHashStr and PreMakeHashStr from latest MakeHashFileList\r
+ # and PreMakeHashFileList files\r
+ MakeHashStr = None\r
+ PreMakeHashStr = None\r
+ MakeTimeStamp = 0\r
+ PreMakeTimeStamp = 0\r
+ Files = [f for f in os.listdir(LongFilePath(self.BuildDir)) if path.isfile(LongFilePath(path.join(self.BuildDir, f)))]\r
+ for File in Files:\r
+ if ".MakeHashFileList." in File:\r
+            # Find the latest file through its time stamp
+ FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]\r
+ if FileTimeStamp > MakeTimeStamp:\r
+ MakeTimeStamp = FileTimeStamp\r
+ MakeHashStr = File.split('.')[-1]\r
+ if len(MakeHashStr) != 32:\r
+ EdkLogger.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File))\r
+ if ".PreMakeHashFileList." in File:\r
+ FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]\r
+ if FileTimeStamp > PreMakeTimeStamp:\r
+ PreMakeTimeStamp = FileTimeStamp\r
+ PreMakeHashStr = File.split('.')[-1]\r
+ if len(PreMakeHashStr) != 32:\r
+ EdkLogger.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File))\r
\r
- self.GenMakeHash(GlobalData.gCacheIR)\r
- if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \\r
- not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashChain or \\r
- not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
- EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return False\r
+ if not MakeHashStr:\r
+ EdkLogger.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return\r
+ if not PreMakeHashStr:\r
+ EdkLogger.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return\r
\r
- MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)\r
- FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName, MakeHashStr)\r
- FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name, MakeHashStr)\r
+ # Create Cache destination dirs\r
+ FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
+ CacheFileDir = path.join(FileDir, MakeHashStr)\r
+ CacheFfsDir = path.join(FfsDir, MakeHashStr)\r
+ CreateDirectory (CacheFileDir)\r
+ CreateDirectory (CacheFfsDir)\r
\r
- CreateDirectory (FileDir)\r
- self.SaveHashChainFileToCache(GlobalData.gCacheIR)\r
- ModuleFile = path.join(self.OutputDir, self.Name + '.inf')\r
- if os.path.exists(ModuleFile):\r
- CopyFileOnChange(ModuleFile, FileDir)\r
+ # Create ModuleHashPair file to support multiple version cache together\r
+ ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
+ if os.path.exists(ModuleHashPair):\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
+ if not (PreMakeHashStr, MakeHashStr) in set(map(tuple, ModuleHashPairList)):\r
+ ModuleHashPairList.insert(0, (PreMakeHashStr, MakeHashStr))\r
+ with open(ModuleHashPair, 'w') as f:\r
+ json.dump(ModuleHashPairList, f, indent=2)\r
+\r
+ # Copy files to Cache destination dirs\r
if not self.OutputFile:\r
Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
self.OutputFile = Ma.Binaries\r
for File in self.OutputFile:\r
- if os.path.exists(File):\r
- if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):\r
- self.CacheCopyFile(FfsDir, self.FfsOutputDir, File)\r
+ if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):\r
+ self.CacheCopyFile(CacheFfsDir, self.FfsOutputDir, File)\r
+ else:\r
+ if self.Name + ".autogen.hash." in File or \\r
+ self.Name + ".autogen.hashchain." in File or \\r
+ self.Name + ".hash." in File or \\r
+ self.Name + ".hashchain." in File or \\r
+ self.Name + ".PreMakeHashFileList." in File or \\r
+ self.Name + ".MakeHashFileList." in File:\r
+ self.CacheCopyFile(FileDir, self.BuildDir, File)\r
else:\r
- self.CacheCopyFile(FileDir, self.OutputDir, File)\r
-\r
- def SaveHashChainFileToCache(self, gDict):\r
- if not GlobalData.gBinCacheDest:\r
- return False\r
-\r
- self.GenPreMakefileHash(gDict)\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
- EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return False\r
-\r
- self.GenMakeHash(gDict)\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
- EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return False\r
-\r
- # save the hash chain list as cache file\r
- MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)\r
- CacheDestDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
- CacheHashDestDir = path.join(CacheDestDir, MakeHashStr)\r
- ModuleHashPair = path.join(CacheDestDir, self.Name + ".ModuleHashPair")\r
- MakeHashChain = path.join(CacheHashDestDir, self.Name + ".MakeHashChain")\r
- ModuleFilesChain = path.join(CacheHashDestDir, self.Name + ".ModuleFilesChain")\r
-\r
- # save the HashChainDict as json file\r
- CreateDirectory (CacheDestDir)\r
- CreateDirectory (CacheHashDestDir)\r
- try:\r
- ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
- if os.path.exists(ModuleHashPair):\r
- with open(ModuleHashPair, 'r') as f:\r
- ModuleHashPairList = json.load(f)\r
- PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
- MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
- ModuleHashPairList.append((PreMakeHash, MakeHash))\r
- ModuleHashPairList = list(set(map(tuple, ModuleHashPairList)))\r
- with open(ModuleHashPair, 'w') as f:\r
- json.dump(ModuleHashPairList, f, indent=2)\r
- except:\r
- EdkLogger.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair)\r
- return False\r
-\r
- try:\r
- with open(MakeHashChain, 'w') as f:\r
- json.dump(gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain, f, indent=2)\r
- except:\r
- EdkLogger.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain)\r
- return False\r
-\r
- try:\r
- with open(ModuleFilesChain, 'w') as f:\r
- json.dump(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain, f, indent=2)\r
- except:\r
- EdkLogger.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain)\r
- return False\r
-\r
- # save the autogenfile and makefile for debug usage\r
- CacheDebugDir = path.join(CacheHashDestDir, "CacheDebug")\r
- CreateDirectory (CacheDebugDir)\r
- CopyFileOnChange(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath, CacheDebugDir)\r
- if gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
- for File in gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
- CopyFileOnChange(str(File), CacheDebugDir)\r
-\r
- return True\r
-\r
+ self.CacheCopyFile(CacheFileDir, self.BuildDir, File)\r
## Create makefile for the module and its dependent libraries\r
#\r
# @param CreateLibraryMakeFile Flag indicating if or not the makefiles of\r
#\r
@cached_class_function\r
def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r
- gDict = GlobalData.gCacheIR\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
- return\r
\r
# nest this function inside it's only caller.\r
def CreateTimeStamp():\r
MakefileType = Makefile._FileType\r
MakefileName = Makefile._FILE_NAME_[MakefileType]\r
MakefilePath = os.path.join(self.MakeFileDir, MakefileName)\r
-\r
- MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
- MewIR.MakefilePath = MakefilePath\r
- MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
- MewIR.CreateMakeFileDone = True\r
- with GlobalData.cache_lock:\r
- try:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.MakefilePath = MakefilePath\r
- IR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
- IR.CreateMakeFileDone = True\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- except:\r
- gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
+ FilePath = path.join(self.BuildDir, self.Name + ".makefile")\r
+ SaveFileOnChange(FilePath, MakefilePath, False)\r
\r
def CopyBinaryFiles(self):\r
for File in self.Module.Binaries:\r
# dependent libraries will be created\r
#\r
def CreateCodeFile(self, CreateLibraryCodeFile=True):\r
- gDict = GlobalData.gCacheIR\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:\r
- return\r
\r
if self.IsCodeFileCreated:\r
return\r
# CanSkip uses timestamps to determine build skipping\r
if self.CanSkip():\r
return\r
-\r
+ self.LibraryAutoGenList\r
AutoGenList = []\r
IgoredAutoGenList = []\r
\r
(" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))\r
\r
self.IsCodeFileCreated = True\r
- MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
- MewIR.CreateCodeFileDone = True\r
- with GlobalData.cache_lock:\r
- try:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.CreateCodeFileDone = True\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- except:\r
- gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
\r
return AutoGenList\r
\r
self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)\r
return RetVal\r
\r
- def GenModuleHash(self):\r
- # Initialize a dictionary for each arch type\r
- if self.Arch not in GlobalData.gModuleHash:\r
- GlobalData.gModuleHash[self.Arch] = {}\r
+ def GenCMakeHash(self):\r
+ # GenCMakeHash can only be called in --binary-destination\r
+        # Never called in multiprocessing and always directly saves the result in the main process,
+        # so no remote dict is needed to share the gCMakeHashFile result with the main process
\r
- # Early exit if module or library has been hashed and is in memory\r
- if self.Name in GlobalData.gModuleHash[self.Arch]:\r
- return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
+ DependencyFileSet = set()\r
+ # Add AutoGen files\r
+ if self.AutoGenFileList:\r
+ for File in set(self.AutoGenFileList):\r
+ DependencyFileSet.add(File)\r
+\r
+ # Add Makefile\r
+ abspath = path.join(self.BuildDir, self.Name + ".makefile")\r
+ try:\r
+ with open(LongFilePath(abspath),"r") as fd:\r
+ lines = fd.readlines()\r
+ except Exception as e:\r
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)\r
+ if lines:\r
+ DependencyFileSet.update(lines)\r
\r
+    # Calculate all above dependency files hash
# Initialze hash object\r
+ FileList = []\r
m = hashlib.md5()\r
-\r
- # Add Platform level hash\r
- m.update(GlobalData.gPlatformHash.encode('utf-8'))\r
-\r
- # Add Package level hash\r
- if self.DependentPackageList:\r
- for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
- if Pkg.PackageName in GlobalData.gPackageHash:\r
- m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))\r
-\r
- # Add Library hash\r
- if self.LibraryAutoGenList:\r
- for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
- if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r
- Lib.GenModuleHash()\r
- m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
-\r
- # Add Module self\r
- with open(str(self.MetaFile), 'rb') as f:\r
- Content = f.read()\r
- m.update(Content)\r
-\r
- # Add Module's source files\r
- if self.SourceFileList:\r
- for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r
- f = open(str(File), 'rb')\r
+ for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
+ if not path.exists(LongFilePath(str(File))):\r
+ EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
+ continue\r
+ with open(LongFilePath(str(File)), 'rb') as f:\r
Content = f.read()\r
- f.close()\r
- m.update(Content)\r
-\r
- GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()\r
-\r
- return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
+ m.update(Content)\r
+ FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
\r
- def GenModuleFilesHash(self, gDict):\r
- # Early exit if module or library has been hashed and is in memory\r
- if (self.MetaFile.Path, self.Arch) in gDict:\r
- if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
+ HashChainFile = path.join(self.BuildDir, self.Name + ".autogen.hashchain." + m.hexdigest())\r
+ GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+ try:\r
+ with open(LongFilePath(HashChainFile), 'w') as f:\r
+ json.dump(FileList, f, indent=2)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)\r
+ return False\r
\r
- # skip if the module cache already crashed\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return\r
+ def GenModuleHash(self):\r
+ # GenModuleHash only called after autogen phase\r
+        # Never called in multiprocessing and always directly saves the result in the main process,
+        # so no remote dict is needed to share the gModuleHashFile result with the main process
+        #
+        # GenPreMakefileHashList consumes no dict.
+        # GenPreMakefileHashList produces the local gModuleHashFile dict.
\r
DependencyFileSet = set()\r
# Add Module Meta file\r
- DependencyFileSet.add(self.MetaFile)\r
+ DependencyFileSet.add(self.MetaFile.Path)\r
\r
# Add Module's source files\r
if self.SourceFileList:\r
for File in set(self.SourceFileList):\r
- DependencyFileSet.add(File)\r
+ DependencyFileSet.add(File.Path)\r
\r
# Add modules's include header files\r
- # Search dependency file list for each source file\r
- SourceFileList = []\r
- OutPutFileList = []\r
- for Target in self.IntroTargetList:\r
- SourceFileList.extend(Target.Inputs)\r
- OutPutFileList.extend(Target.Outputs)\r
- if OutPutFileList:\r
- for Item in OutPutFileList:\r
- if Item in SourceFileList:\r
- SourceFileList.remove(Item)\r
- SearchList = []\r
- for file_path in self.IncludePathList + self.BuildOptionIncPathList:\r
- # skip the folders in platform BuildDir which are not been generated yet\r
- if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):\r
- continue\r
- SearchList.append(file_path)\r
- FileDependencyDict = {}\r
- ForceIncludedFile = []\r
- for F in SourceFileList:\r
- # skip the files which are not been generated yet, because\r
- # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c\r
- if not os.path.exists(F.Path):\r
- continue\r
- FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)\r
+ # Directly use the deps.txt file in the module BuildDir\r
+ abspath = path.join(self.BuildDir, "deps.txt")\r
+ rt = None\r
+ try:\r
+ with open(LongFilePath(abspath),"r") as fd:\r
+ lines = fd.readlines()\r
+ if lines:\r
+ rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])\r
+ except Exception as e:\r
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)\r
+\r
+ if rt:\r
+ DependencyFileSet.update(rt)\r
\r
- if FileDependencyDict:\r
- for Dependency in FileDependencyDict.values():\r
- DependencyFileSet.update(set(Dependency))\r
\r
# Caculate all above dependency files hash\r
# Initialze hash object\r
FileList = []\r
m = hashlib.md5()\r
+ BuildDirStr = path.abspath(self.BuildDir).lower()\r
for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
- if not os.path.exists(str(File)):\r
+        # Skip the AutoGen files in BuildDir which have already been
+        # included in the .autogen.hash. file
+ if BuildDirStr in path.abspath(File).lower():\r
+ continue\r
+ if not path.exists(LongFilePath(File)):\r
EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
continue\r
- with open(str(File), 'rb') as f:\r
+ with open(LongFilePath(File), 'rb') as f:\r
Content = f.read()\r
m.update(Content)\r
- FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
-\r
+ FileList.append((File, hashlib.md5(Content).hexdigest()))\r
\r
- MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
- MewIR.ModuleFilesHashDigest = m.digest()\r
- MewIR.ModuleFilesHashHexDigest = m.hexdigest()\r
- MewIR.ModuleFilesChain = FileList\r
- with GlobalData.cache_lock:\r
- try:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.ModuleFilesHashDigest = m.digest()\r
- IR.ModuleFilesHashHexDigest = m.hexdigest()\r
- IR.ModuleFilesChain = FileList\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- except:\r
- gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
-\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
-\r
- def GenPreMakefileHash(self, gDict):\r
- # Early exit if module or library has been hashed and is in memory\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
+ HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())\r
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+ try:\r
+ with open(LongFilePath(HashChainFile), 'w') as f:\r
+ json.dump(FileList, f, indent=2)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)\r
+ return False\r
\r
- # skip if the module cache already crashed\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return\r
+ def GenPreMakefileHashList(self):\r
+        # GenPreMakefileHashList consumes the following dicts:
+ # gPlatformHashFile\r
+ # gPackageHashFile\r
+ # gModuleHashFile\r
+        # GenPreMakefileHashList produces no dict.
+        # gModuleHashFile items might be produced in multiprocessing, so we
+        # need to check the gModuleHashFile remote dict
\r
# skip binary module\r
if self.IsBinaryModule:\r
return\r
\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
- self.GenModuleFilesHash(gDict)\r
-\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
- EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
- return\r
-\r
- # Initialze hash object\r
+ FileList = []\r
m = hashlib.md5()\r
-\r
# Add Platform level hash\r
- if ('PlatformHash') in gDict:\r
- m.update(gDict[('PlatformHash')].encode('utf-8'))\r
+ HashFile = GlobalData.gPlatformHashFile\r
+ if path.exists(LongFilePath(HashFile)):\r
+ FileList.append(HashFile)\r
+ m.update(HashFile.encode('utf-8'))\r
else:\r
- EdkLogger.quiet("[cache warning]: PlatformHash is missing")\r
+ EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)\r
\r
# Add Package level hash\r
if self.DependentPackageList:\r
for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
- if (Pkg.PackageName, 'PackageHash') in gDict:\r
- m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))\r
+ if not (Pkg.PackageName, Pkg.Arch) in GlobalData.gPackageHashFile:\r
+ EdkLogger.quiet("[cache warning]:No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))\r
+ continue\r
+ HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]\r
+ if path.exists(LongFilePath(HashFile)):\r
+ FileList.append(HashFile)\r
+ m.update(HashFile.encode('utf-8'))\r
else:\r
- EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg.PackageName, self.MetaFile.Name, self.Arch))\r
-\r
- # Add Library hash\r
- if self.LibraryAutoGenList:\r
- for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
- if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \\r
- not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:\r
- Lib.GenPreMakefileHash(gDict)\r
- m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)\r
+ EdkLogger.quiet("[cache warning]:No Package HashFile: %s" % HashFile)\r
\r
# Add Module self\r
- m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)\r
-\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.PreMakefileHashHexDigest = m.hexdigest()\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
-\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
-\r
- def GenMakeHeaderFilesHash(self, gDict):\r
- # Early exit if module or library has been hashed and is in memory\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
-\r
- # skip if the module cache already crashed\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return\r
-\r
- # skip binary module\r
- if self.IsBinaryModule:\r
- return\r
-\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:\r
- if self.IsLibrary:\r
- if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.libConstPcd:\r
- self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
- if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.Refes:\r
- self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
- self.CreateCodeFile()\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
- self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.File, self.Arch),[]))\r
-\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
- EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
- return\r
-\r
- DependencyFileSet = set()\r
- # Add Makefile\r
- if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:\r
- DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)\r
+        # GenPreMakefileHashList is needed in both --binary-destination
+        # and --hash. And --hash might save ModuleHashFile in a remote dict
+        # during multiprocessing.
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:\r
+ HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]\r
else:\r
- EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
-\r
- # Add header files\r
- if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
- for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
- DependencyFileSet.add(File)\r
+ EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ if path.exists(LongFilePath(HashFile)):\r
+ FileList.append(HashFile)\r
+ m.update(HashFile.encode('utf-8'))\r
else:\r
- EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+ EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)\r
\r
- # Add AutoGen files\r
- if self.AutoGenFileList:\r
- for File in set(self.AutoGenFileList):\r
- DependencyFileSet.add(File)\r
-\r
- # Caculate all above dependency files hash\r
- # Initialze hash object\r
- FileList = []\r
- m = hashlib.md5()\r
- for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
- if not os.path.exists(str(File)):\r
- EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
- continue\r
- f = open(str(File), 'rb')\r
- Content = f.read()\r
- f.close()\r
- m.update(Content)\r
- FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
-\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.AutoGenFileList = self.AutoGenFileList.keys()\r
- IR.MakeHeaderFilesHashChain = FileList\r
- IR.MakeHeaderFilesHashDigest = m.digest()\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ # Add Library hash\r
+ if self.LibraryAutoGenList:\r
+ for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):\r
\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
+ if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:\r
+ HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]\r
+ else:\r
+ EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))\r
+ if path.exists(LongFilePath(HashFile)):\r
+ FileList.append(HashFile)\r
+ m.update(HashFile.encode('utf-8'))\r
+ else:\r
+ EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)\r
\r
- def GenMakeHash(self, gDict):\r
- # Early exit if module or library has been hashed and is in memory\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
+ # Save PreMakeHashFileList\r
+ FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())\r
+ try:\r
+ with open(LongFilePath(FilePath), 'w') as f:\r
+ json.dump(FileList, f, indent=0)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)\r
\r
- # skip if the module cache already crashed\r
- if (self.MetaFile.Path, self.Arch) in gDict and \\r
- gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return\r
+    def GenMakefileHashList(self):\r
+        # GenMakefileHashList is only needed for --binary-destination, which\r
+        # saves everything into the local dict, so there is no need to check\r
+        # the remote dict here.\r
\r
# skip binary module\r
if self.IsBinaryModule:\r
return\r
\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
- self.GenModuleFilesHash(gDict)\r
- if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
- self.GenMakeHeaderFilesHash(gDict)\r
-\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:\r
- EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
- return\r
-\r
- # Initialze hash object\r
+ FileList = []\r
m = hashlib.md5()\r
- MakeHashChain = []\r
+ # Add AutoGen hash\r
+ HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]\r
+ if path.exists(LongFilePath(HashFile)):\r
+ FileList.append(HashFile)\r
+ m.update(HashFile.encode('utf-8'))\r
+ else:\r
+ EdkLogger.quiet("[cache warning]:No AutoGen HashFile: %s" % HashFile)\r
\r
- # Add hash of makefile and dependency header files\r
- m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)\r
- New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))\r
- New.sort(key=lambda x: str(x))\r
- MakeHashChain += New\r
+        # Add Module self\r
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:\r
+            HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]\r
+            if path.exists(LongFilePath(HashFile)):\r
+                FileList.append(HashFile)\r
+                m.update(HashFile.encode('utf-8'))\r
+            else:\r
+                EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)\r
+        else:\r
+            # Nesting matters here: without it, HashFile would still hold the\r
+            # AutoGen hash file path assigned earlier in this function and\r
+            # would be hashed a second time.\r
+            EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
\r
# Add Library hash\r
if self.LibraryAutoGenList:\r
- for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
- if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \\r
- not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:\r
- Lib.GenMakeHash(gDict)\r
- if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:\r
- print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)\r
- continue\r
- m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)\r
- New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))\r
- New.sort(key=lambda x: str(x))\r
- MakeHashChain += New\r
+        for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):\r
+            if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:\r
+                HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]\r
+            else:\r
+                EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))\r
+                # Skip this lib: HashFile would be unbound (or stale from a\r
+                # previous iteration) when the lookup above misses.\r
+                continue\r
+            if path.exists(LongFilePath(HashFile)):\r
+                FileList.append(HashFile)\r
+                m.update(HashFile.encode('utf-8'))\r
+            else:\r
+                EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)\r
\r
- # Add Module self\r
- m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)\r
- New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))\r
- New.sort(key=lambda x: str(x))\r
- MakeHashChain += New\r
+ # Save MakeHashFileList\r
+ FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())\r
+ try:\r
+ with open(LongFilePath(FilePath), 'w') as f:\r
+ json.dump(FileList, f, indent=0)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)\r
+\r
+    def CheckHashChainFile(self, HashChainFile):\r
+        # Assume the HashChainFile basename format is 'x.hashchain.16BytesHexStr'\r
+        # where x is the module name and 16BytesHexStr is the 32-character md5\r
+        # hexdigest of all hashchain files content\r
+        HashStr = HashChainFile.split('.')[-1]\r
+        if len(HashStr) != 32:\r
+            # Was "% (File)": File is undefined in this scope and raised a\r
+            # NameError whenever the format check fired.\r
+            EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % (HashChainFile))\r
+ return False\r
\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.MakeHashDigest = m.digest()\r
- IR.MakeHashHexDigest = m.hexdigest()\r
- IR.MakeHashChain = MakeHashChain\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ try:\r
+ with open(LongFilePath(HashChainFile), 'r') as f:\r
+ HashChainList = json.load(f)\r
+ except:\r
+ EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)\r
+ return False\r
\r
- return gDict[(self.MetaFile.Path, self.Arch)]\r
+ # Print the different file info\r
+ # print(HashChainFile)\r
+ for idx, (SrcFile, SrcHash) in enumerate (HashChainList):\r
+ if SrcFile in GlobalData.gFileHashDict:\r
+ DestHash = GlobalData.gFileHashDict[SrcFile]\r
+ else:\r
+ try:\r
+ with open(LongFilePath(SrcFile), 'rb') as f:\r
+ Content = f.read()\r
+ DestHash = hashlib.md5(Content).hexdigest()\r
+ GlobalData.gFileHashDict[SrcFile] = DestHash\r
+ except IOError as X:\r
+ # cache miss if SrcFile is removed in new version code\r
+ GlobalData.gFileHashDict[SrcFile] = 0\r
+ EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))\r
+ return False\r
+ if SrcHash != DestHash:\r
+ EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))\r
+ return False\r
+\r
+ return True\r
\r
## Decide whether we can skip the left autogen and make process\r
- def CanSkipbyPreMakefileCache(self, gDict):\r
+ def CanSkipbyMakeCache(self):\r
+        # For --binary-source only\r
+        # CanSkipbyMakeCache consumes the dicts below:\r
+        #   gModuleMakeCacheStatus\r
+        #   gHashChainStatus\r
+        # and itself fills gModuleMakeCacheStatus and gModuleHashFile.\r
+        # All these dicts might be produced in multiprocessing, so the\r
+        # remote dicts need to be checked.\r
+\r
if not GlobalData.gBinCacheSource:\r
return False\r
\r
- if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:\r
- return True\r
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:\r
+ return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]\r
\r
- if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return False\r
-\r
- # If Module is binary, do not skip by cache\r
+ # If Module is binary, which has special build rule, do not skip by cache.\r
if self.IsBinaryModule:\r
+ print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
return False\r
\r
- # .inc is contains binary information so do not skip by hash as well\r
+ # see .inc as binary file, do not skip by hash\r
for f_ext in self.SourceFileList:\r
if '.inc' in str(f_ext):\r
+ print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
return False\r
\r
- # Get the module hash values from stored cache and currrent build\r
- # then check whether cache hit based on the hash values\r
- # if cache hit, restore all the files from cache\r
- FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
\r
ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
- ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
- if not os.path.exists(ModuleHashPair):\r
- EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.CacheCrash = True\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- return False\r
-\r
+ ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")\r
try:\r
- with open(ModuleHashPair, 'r') as f:\r
+ with open(LongFilePath(ModuleHashPair), 'r') as f:\r
ModuleHashPairList = json.load(f)\r
except:\r
+ # ModuleHashPair might not exist for new added module\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
+ print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)\r
return False\r
\r
- self.GenPreMakefileHash(gDict)\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
- EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
- return False\r
-\r
- MakeHashStr = None\r
- CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
+ # Check the PreMakeHash in ModuleHashPairList one by one\r
for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
- if PreMakefileHash == CurrentPreMakeHash:\r
- MakeHashStr = str(MakeHash)\r
+ SourceHashDir = path.join(ModuleCacheDir, MakeHash)\r
+ SourceFfsHashDir = path.join(FfsDir, MakeHash)\r
+ PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)\r
+ MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)\r
\r
- if not MakeHashStr:\r
- return False\r
+ try:\r
+ with open(LongFilePath(MakeHashFileList_FilePah), 'r') as f:\r
+ MakeHashFileList = json.load(f)\r
+ except:\r
+ EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePah)\r
+ continue\r
\r
- TargetHashDir = path.join(FileDir, MakeHashStr)\r
- TargetFfsHashDir = path.join(FfsDir, MakeHashStr)\r
+ HashMiss = False\r
+ for HashChainFile in MakeHashFileList:\r
+ HashChainStatus = None\r
+ if HashChainFile in GlobalData.gHashChainStatus:\r
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
+ if HashChainStatus == False:\r
+ HashMiss = True\r
+ break\r
+ elif HashChainStatus == True:\r
+ continue\r
+ # Convert to path start with cache source dir\r
+ RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)\r
+ NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)\r
+ if self.CheckHashChainFile(NewFilePath):\r
+ GlobalData.gHashChainStatus[HashChainFile] = True\r
+ # Save the module self HashFile for GenPreMakefileHashList later usage\r
+ if self.Name + ".hashchain." in HashChainFile:\r
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+ else:\r
+ GlobalData.gHashChainStatus[HashChainFile] = False\r
+ HashMiss = True\r
+ break\r
\r
- if not os.path.exists(TargetHashDir):\r
- EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)\r
- return False\r
+ if HashMiss:\r
+ continue\r
\r
- for root, dir, files in os.walk(TargetHashDir):\r
- for f in files:\r
- File = path.join(root, f)\r
- self.CacheCopyFile(self.OutputDir, TargetHashDir, File)\r
- if os.path.exists(TargetFfsHashDir):\r
- for root, dir, files in os.walk(TargetFfsHashDir):\r
+ # PreMakefile cache hit, restore the module build result\r
+ for root, dir, files in os.walk(SourceHashDir):\r
for f in files:\r
File = path.join(root, f)\r
- self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)\r
-\r
- if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
- CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+ self.CacheCopyFile(self.BuildDir, SourceHashDir, File)\r
+ if os.path.exists(SourceFfsHashDir):\r
+ for root, dir, files in os.walk(SourceFfsHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)\r
+\r
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+\r
+ print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
+ return True\r
\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.PreMakeCacheHit = True\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)\r
- #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return True\r
+ print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
\r
- ## Decide whether we can skip the make process\r
- def CanSkipbyMakeCache(self, gDict):\r
- if not GlobalData.gBinCacheSource:\r
+ ## Decide whether we can skip the left autogen and make process\r
+ def CanSkipbyPreMakeCache(self):\r
+        # CanSkipbyPreMakeCache consumes the dicts below:\r
+        #   gModulePreMakeCacheStatus\r
+        #   gHashChainStatus\r
+        #   gModuleHashFile\r
+        # and itself fills gModulePreMakeCacheStatus.\r
+        # All these dicts might be produced in multiprocessing, so the\r
+        # remote dicts need to be checked.\r
+\r
+ if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:\r
return False\r
\r
- if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
- return True\r
-\r
- if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return False\r
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:\r
+ return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]\r
\r
- # If Module is binary, do not skip by cache\r
+ # If Module is binary, which has special build rule, do not skip by cache.\r
if self.IsBinaryModule:\r
- print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)\r
+ print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
return False\r
\r
- # .inc is contains binary information so do not skip by hash as well\r
+ # see .inc as binary file, do not skip by hash\r
for f_ext in self.SourceFileList:\r
if '.inc' in str(f_ext):\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.MakeCacheHit = False\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)\r
+ print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
return False\r
\r
- # Get the module hash values from stored cache and currrent build\r
- # then check whether cache hit based on the hash values\r
- # if cache hit, restore all the files from cache\r
- FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
- FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
-\r
- ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
- ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
- if not os.path.exists(ModuleHashPair):\r
- EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.CacheCrash = True\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- return False\r
-\r
- try:\r
- with open(ModuleHashPair, 'r') as f:\r
- ModuleHashPairList = json.load(f)\r
- except:\r
- EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
- return False\r
-\r
- self.GenMakeHash(gDict)\r
- if not (self.MetaFile.Path, self.Arch) in gDict or \\r
- not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
- EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
- return False\r
-\r
- MakeHashStr = None\r
- CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
- for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
- if MakeHash == CurrentMakeHash:\r
- MakeHashStr = str(MakeHash)\r
-\r
- if not MakeHashStr:\r
- print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)\r
- return False\r
-\r
- TargetHashDir = path.join(FileDir, MakeHashStr)\r
- TargetFfsHashDir = path.join(FfsDir, MakeHashStr)\r
- if not os.path.exists(TargetHashDir):\r
- EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)\r
- return False\r
-\r
- for root, dir, files in os.walk(TargetHashDir):\r
- for f in files:\r
- File = path.join(root, f)\r
- self.CacheCopyFile(self.OutputDir, TargetHashDir, File)\r
-\r
- if os.path.exists(TargetFfsHashDir):\r
- for root, dir, files in os.walk(TargetFfsHashDir):\r
- for f in files:\r
- File = path.join(root, f)\r
- self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)\r
-\r
- if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
- CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
- with GlobalData.cache_lock:\r
- IR = gDict[(self.MetaFile.Path, self.Arch)]\r
- IR.MakeCacheHit = True\r
- gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)\r
- return True\r
-\r
- ## Show the first file name which causes cache miss\r
- def PrintFirstMakeCacheMissFile(self, gDict):\r
+ # For --hash only in the incremental build\r
if not GlobalData.gBinCacheSource:\r
- return\r
-\r
- # skip if the module cache already crashed\r
- if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
- return\r
-\r
- # skip binary module\r
- if self.IsBinaryModule:\r
- return\r
+ Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]\r
+ PreMakeHashFileList_FilePah = None\r
+ MakeTimeStamp = 0\r
+ # Find latest PreMakeHashFileList file in self.BuildDir folder\r
+            for File in Files:\r
+                if ".PreMakeHashFileList." in File:\r
+                    # File is already an absolute path (built above), so the\r
+                    # join is a no-op; os.stat()[8] is st_mtime.\r
+                    FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]\r
+                    if FileTimeStamp > MakeTimeStamp:\r
+                        MakeTimeStamp = FileTimeStamp\r
+                        PreMakeHashFileList_FilePah = File\r
+ if not PreMakeHashFileList_FilePah:\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
\r
- if not (self.MetaFile.Path, self.Arch) in gDict:\r
- return\r
+ try:\r
+ with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:\r
+ PreMakeHashFileList = json.load(f)\r
+ except:\r
+ EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)\r
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
\r
- # Only print cache miss file for the MakeCache not hit module\r
- if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
- return\r
+ HashMiss = False\r
+ for HashChainFile in PreMakeHashFileList:\r
+ HashChainStatus = None\r
+ if HashChainFile in GlobalData.gHashChainStatus:\r
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
+ if HashChainStatus == False:\r
+ HashMiss = True\r
+ break\r
+ elif HashChainStatus == True:\r
+ continue\r
+ if self.CheckHashChainFile(HashChainFile):\r
+ GlobalData.gHashChainStatus[HashChainFile] = True\r
+ # Save the module self HashFile for GenPreMakefileHashList later usage\r
+ if self.Name + ".hashchain." in HashChainFile:\r
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+ else:\r
+ GlobalData.gHashChainStatus[HashChainFile] = False\r
+ HashMiss = True\r
+ break\r
\r
- if not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:\r
- EdkLogger.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return\r
+ if HashMiss:\r
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
+ else:\r
+ print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
+ return True\r
\r
- # Find the cache dir name through the .ModuleHashPair file info\r
- FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
\r
ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
- ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
- if not os.path.exists(ModuleHashPair):\r
- EdkLogger.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return\r
-\r
+ ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")\r
try:\r
- with open(ModuleHashPair, 'r') as f:\r
+ with open(LongFilePath(ModuleHashPair), 'r') as f:\r
ModuleHashPairList = json.load(f)\r
except:\r
- EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return\r
+ # ModuleHashPair might not exist for new added module\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ return False\r
\r
- MakeHashSet = set()\r
+ # Check the PreMakeHash in ModuleHashPairList one by one\r
for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
- TargetHashDir = path.join(FileDir, str(MakeHash))\r
- if os.path.exists(TargetHashDir):\r
- MakeHashSet.add(MakeHash)\r
- if not MakeHashSet:\r
- EdkLogger.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
- return\r
-\r
- TargetHash = list(MakeHashSet)[0]\r
- TargetHashDir = path.join(FileDir, str(TargetHash))\r
- if len(MakeHashSet) > 1 :\r
- EdkLogger.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash, self.MetaFile.Path, self.Arch))\r
+ SourceHashDir = path.join(ModuleCacheDir, MakeHash)\r
+ SourceFfsHashDir = path.join(FfsDir, MakeHash)\r
+ PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)\r
+ MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)\r
\r
- ListFile = path.join(TargetHashDir, self.Name + '.MakeHashChain')\r
- if os.path.exists(ListFile):\r
try:\r
- f = open(ListFile, 'r')\r
- CachedList = json.load(f)\r
- f.close()\r
+ with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:\r
+ PreMakeHashFileList = json.load(f)\r
except:\r
- EdkLogger.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile)\r
- return\r
- else:\r
- EdkLogger.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile)\r
- return\r
-\r
- CurrentList = gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain\r
- for idx, (file, hash) in enumerate (CurrentList):\r
- (filecached, hashcached) = CachedList[idx]\r
- if file != filecached:\r
- EdkLogger.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self.MetaFile.Path, self.Arch, file, filecached))\r
- break\r
- if hash != hashcached:\r
- EdkLogger.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self.MetaFile.Path, self.Arch, file))\r
- break\r
-\r
- return True\r
+ EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)\r
+ continue\r
\r
- ## Decide whether we can skip the ModuleAutoGen process\r
- def CanSkipbyCache(self, gDict):\r
- # Hashing feature is off\r
- if not GlobalData.gBinCacheSource:\r
- return False\r
+ HashMiss = False\r
+ for HashChainFile in PreMakeHashFileList:\r
+ HashChainStatus = None\r
+ if HashChainFile in GlobalData.gHashChainStatus:\r
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
+ if HashChainStatus == False:\r
+ HashMiss = True\r
+ break\r
+ elif HashChainStatus == True:\r
+ continue\r
+ # Convert to path start with cache source dir\r
+ RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)\r
+ NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)\r
+ if self.CheckHashChainFile(NewFilePath):\r
+ GlobalData.gHashChainStatus[HashChainFile] = True\r
+ else:\r
+ GlobalData.gHashChainStatus[HashChainFile] = False\r
+ HashMiss = True\r
+ break\r
\r
- if self in GlobalData.gBuildHashSkipTracking:\r
- return GlobalData.gBuildHashSkipTracking[self]\r
+ if HashMiss:\r
+ continue\r
\r
- # If library or Module is binary do not skip by hash\r
- if self.IsBinaryModule:\r
- GlobalData.gBuildHashSkipTracking[self] = False\r
- return False\r
+ # PreMakefile cache hit, restore the module build result\r
+ for root, dir, files in os.walk(SourceHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.BuildDir, SourceHashDir, File)\r
+ if os.path.exists(SourceFfsHashDir):\r
+ for root, dir, files in os.walk(SourceFfsHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)\r
+\r
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+\r
+ print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
+ return True\r
\r
- # .inc is contains binary information so do not skip by hash as well\r
- for f_ext in self.SourceFileList:\r
- if '.inc' in str(f_ext):\r
- GlobalData.gBuildHashSkipTracking[self] = False\r
- return False\r
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
\r
- if not (self.MetaFile.Path, self.Arch) in gDict:\r
+ ## Decide whether we can skip the Module build\r
+ def CanSkipbyCache(self, gHitSet):\r
+ # Hashing feature is off\r
+ if not GlobalData.gBinCacheSource:\r
return False\r
\r
- if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:\r
- GlobalData.gBuildHashSkipTracking[self] = True\r
- return True\r
-\r
- if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
- GlobalData.gBuildHashSkipTracking[self] = True\r
+ if self in gHitSet:\r
return True\r
\r
return False\r