#\r
from __future__ import absolute_import\r
from AutoGen.AutoGen import AutoGen\r
-from Common.LongFilePathSupport import CopyLongFilePath\r
+from Common.LongFilePathSupport import LongFilePath, CopyLongFilePath\r
from Common.BuildToolError import *\r
from Common.DataType import *\r
from Common.Misc import *\r
from .GenPcdDb import CreatePcdDatabaseCode\r
from Common.caching import cached_class_function\r
from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r
+import json\r
+import tempfile\r
\r
## Mapping Makefile type\r
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}\r
#\r
-# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT\r
+# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC\r
# is the former use /I , the Latter used -I to specify include directories\r
#\r
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)\r
self.AutoGenDepSet = set()\r
self.ReferenceModules = []\r
self.ConstPcd = {}\r
+ self.FileDependCache = {}\r
\r
def __init_platform_info__(self):\r
pinfo = self.DataPipe.Get("P_Info")\r
#\r
@cached_class_function\r
def __hash__(self):\r
-        return hash((self.MetaFile, self.Arch))\r
+        # Include ToolChain and BuildTarget in the key so AutoGen objects for\r
+        # the same INF built for different targets/toolchains do not collide\r
+        # in hash-based containers.\r
+        return hash((self.MetaFile, self.Arch, self.ToolChain,self.BuildTarget))\r
def __repr__(self):\r
return "%s [%s]" % (self.MetaFile, self.Arch)\r
\r
ModuleNames = self.DataPipe.Get("M_Name")\r
if not ModuleNames:\r
return self.Name\r
- return ModuleNames.get(self.Name,self.Name)\r
+ return ModuleNames.get((self.Name,self.MetaFile),self.Name)\r
\r
# Macros could be used in build_rule.txt (also Makefile)\r
@cached_property\r
def BuildCommand(self):\r
return self.PlatformInfo.BuildCommand\r
\r
- ## Get object list of all packages the module and its dependent libraries belong to\r
+ ## Get Module package and Platform package\r
+ #\r
+ # @retval list The list of package object\r
+ #\r
+    @cached_property\r
+    def PackageList(self):\r
+        # Start from the module's own packages, then append platform-level\r
+        # packages not already present (order-preserving de-duplication).\r
+        PkagList = []\r
+        if self.Module.Packages:\r
+            PkagList.extend(self.Module.Packages)\r
+        Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
+        for Package in Platform.Packages:\r
+            if Package in PkagList:\r
+                continue\r
+            PkagList.append(Package)\r
+        return PkagList\r
+\r
+ ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on\r
#\r
# @retval list The list of package object\r
#\r
@cached_property\r
def DerivedPackageList(self):\r
PackageList = []\r
- for M in [self.Module] + self.DependentLibraryList:\r
+ PackageList.extend(self.PackageList)\r
+ for M in self.DependentLibraryList:\r
for Package in M.Packages:\r
if Package in PackageList:\r
continue\r
@cached_property\r
def BuildOptionIncPathList(self):\r
#\r
- # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT\r
+ # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC\r
# is the former use /I , the Latter used -I to specify include directories\r
#\r
if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):\r
BuildOptIncludeRegEx = gBuildOptIncludePatternMsft\r
- elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):\r
+ elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC'):\r
BuildOptIncludeRegEx = gBuildOptIncludePatternOther\r
else:\r
#\r
except KeyError:\r
FlagOption = ''\r
\r
- if self.ToolChainFamily != 'RVCT':\r
- IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]\r
- else:\r
- #\r
- # RVCT may specify a list of directory seperated by commas\r
- #\r
- IncPathList = []\r
- for Path in BuildOptIncludeRegEx.findall(FlagOption):\r
- PathList = GetSplitList(Path, TAB_COMMA_SPLIT)\r
- IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)\r
+ IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]\r
\r
#\r
# EDK II modules must not reference header files outside of the packages they depend on or\r
SubDirectory = os.path.join(self.OutputDir, File.SubDir)\r
if not os.path.exists(SubDirectory):\r
CreateDirectory(SubDirectory)\r
- LastTarget = None\r
+ TargetList = set()\r
+ FinalTargetName = set()\r
RuleChain = set()\r
SourceList = [File]\r
Index = 0\r
self.BuildOption\r
\r
while Index < len(SourceList):\r
+ # Reset the FileType if not the first iteration.\r
+ if Index > 0:\r
+ FileType = TAB_UNKNOWN_FILE\r
Source = SourceList[Index]\r
Index = Index + 1\r
\r
elif Source.Ext in self.BuildRules:\r
RuleObject = self.BuildRules[Source.Ext]\r
else:\r
- # stop at no more rules\r
- if LastTarget:\r
- self._FinalBuildTargetList.add(LastTarget)\r
- break\r
+ # No more rule to apply: Source is a final target.\r
+ FinalTargetName.add(Source)\r
+ continue\r
\r
FileType = RuleObject.SourceFileType\r
self._FileTypes[FileType].add(Source)\r
\r
# stop at STATIC_LIBRARY for library\r
if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:\r
- if LastTarget:\r
- self._FinalBuildTargetList.add(LastTarget)\r
- break\r
+ FinalTargetName.add(Source)\r
+ continue\r
\r
Target = RuleObject.Apply(Source, self.BuildRuleOrder)\r
if not Target:\r
- if LastTarget:\r
- self._FinalBuildTargetList.add(LastTarget)\r
- break\r
- elif not Target.Outputs:\r
- # Only do build for target with outputs\r
- self._FinalBuildTargetList.add(Target)\r
+ # No Target: Source is a final target.\r
+ FinalTargetName.add(Source)\r
+ continue\r
\r
+ TargetList.add(Target)\r
self._BuildTargets[FileType].add(Target)\r
\r
if not Source.IsBinary and Source == File:\r
\r
# to avoid cyclic rule\r
if FileType in RuleChain:\r
- break\r
+ EdkLogger.error("build", ERROR_STATEMENT, "Cyclic dependency detected while generating rule for %s" % str(Source))\r
\r
RuleChain.add(FileType)\r
SourceList.extend(Target.Outputs)\r
- LastTarget = Target\r
- FileType = TAB_UNKNOWN_FILE\r
+\r
+ # For each final target name, retrieve the corresponding TargetDescBlock instance.\r
+ for FTargetName in FinalTargetName:\r
+ for Target in TargetList:\r
+ if FTargetName == Target.Target:\r
+ self._FinalBuildTargetList.add(Target)\r
\r
@cached_property\r
def Targets(self):\r
self.Targets\r
return self._FileTypes\r
\r
- ## Get the list of package object the module depends on\r
+ ## Get the list of package object the module depends on and the Platform depends on\r
#\r
# @retval list The package object list\r
#\r
@cached_property\r
def DependentPackageList(self):\r
- return self.Module.Packages\r
+ return self.PackageList\r
\r
## Return the list of auto-generated code file\r
#\r
@cached_property\r
def ModulePcdList(self):\r
# apply PCD settings from platform\r
- RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)\r
+ RetVal = self.PlatformInfo.ApplyPcdSetting(self, self.Module.Pcds)\r
\r
return RetVal\r
@cached_property\r
continue\r
Pcds.add(Key)\r
PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])\r
- RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))\r
+ RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self, PcdsInLibrary, Library=Library))\r
return RetVal\r
\r
## Get the GUID value mapping\r
RetVal.append(self.MetaFile.Dir)\r
RetVal.append(self.DebugDir)\r
\r
- for Package in self.Module.Packages:\r
+ for Package in self.PackageList:\r
PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
if PackageDir not in RetVal:\r
RetVal.append(PackageDir)\r
for Inc in IncludesList:\r
if Inc not in RetVal:\r
RetVal.append(str(Inc))\r
+ RetVal.extend(self.IncPathFromBuildOptions)\r
return RetVal\r
\r
+    @cached_property\r
+    def IncPathFromBuildOptions(self):\r
+        # Collect include directories passed directly in a tool's FLAGS build\r
+        # option via /I (MSFT) or -I (GCC style); only existing paths are kept.\r
+        IncPathList = []\r
+        for tool in self.BuildOption:\r
+            if 'FLAGS' in self.BuildOption[tool]:\r
+                flags = self.BuildOption[tool]['FLAGS']\r
+                # 'whitespace' marks that the previous token was a bare /I or\r
+                # -I switch, so the path is the next non-empty token.\r
+                whitespace = False\r
+                for flag in flags.split(" "):\r
+                    flag = flag.strip()\r
+                    if flag.startswith(("/I","-I")):\r
+                        if len(flag)>2:\r
+                            # Path attached to the switch, e.g. "-Ifoo".\r
+                            if os.path.exists(flag[2:]):\r
+                                IncPathList.append(flag[2:])\r
+                        else:\r
+                            whitespace = True\r
+                        continue\r
+                    if whitespace and flag:\r
+                        if os.path.exists(flag):\r
+                            IncPathList.append(flag)\r
+                        whitespace = False\r
+        return IncPathList\r
+\r
@cached_property\r
def IncludePathLength(self):\r
return sum(len(inc)+1 for inc in self.IncludePathList)\r
\r
+ ## Get the list of include paths from the packages\r
+ #\r
+ # @IncludesList list The list path\r
+ #\r
+    @cached_property\r
+    def PackageIncludePathList(self):\r
+        IncludesList = []\r
+        # NOTE(review): IncludesList is reassigned on every loop iteration, so\r
+        # only the LAST package's include paths are returned — confirm whether\r
+        # accumulation across all packages was intended.\r
+        for Package in self.PackageList:\r
+            PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
+            IncludesList = Package.Includes\r
+            if Package._PrivateIncludes:\r
+                # Hide private includes from modules outside the package tree.\r
+                if not self.MetaFile.Path.startswith(PackageDir):\r
+                    IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))\r
+        return IncludesList\r
+\r
## Get HII EX PCDs which maybe used by VFR\r
#\r
# efivarstore used by VFR may relate with HII EX PCDs\r
fStringIO.close ()\r
fInputfile.close ()\r
return OutputName\r
+\r
    @cached_property\r
    def OutputFile(self):\r
        retVal = set()\r
-        OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r
-        DebugDir = self.DebugDir.replace('\\', '/').strip('/')\r
-        for Item in self.CodaTargetList:\r
-            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')\r
-            retVal.add(File)\r
-        if self.DepexGenerated:\r
-            retVal.add(self.Name + '.depex')\r
\r
-        Bin = self._GenOffsetBin()\r
-        if Bin:\r
-            retVal.add(Bin)\r
+        # Collect every file under the module BuildDir except intermediate\r
+        # object/debug files, plus everything under the FFS output dir.\r
+        for Root, Dirs, Files in os.walk(self.BuildDir):\r
+            for File in Files:\r
+                # .obj and .debug files are build intermediates, not outputs\r
+                if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):\r
+                    NewFile = path.join(Root, File)\r
+                    retVal.add(NewFile)\r
\r
-        for Root, Dirs, Files in os.walk(OutputDir):\r
+        for Root, Dirs, Files in os.walk(self.FfsOutputDir):\r
            for File in Files:\r
-                if File.lower().endswith('.pdb'):\r
-                    retVal.add(File)\r
+                NewFile = path.join(Root, File)\r
+                retVal.add(NewFile)\r
\r
        return retVal\r
\r
\r
self.IsAsBuiltInfCreated = True\r
\r
+    def CacheCopyFile(self, DestDir, SourceDir, File):\r
+        ## Mirror one file from SourceDir into the same relative path under DestDir.\r
+        # Directories are silently skipped; only regular files are copied.\r
+        if os.path.isdir(File):\r
+            return\r
+\r
+        # Preserve the file's path relative to SourceDir under DestDir.\r
+        sub_dir = os.path.relpath(File, SourceDir)\r
+        destination_file = os.path.join(DestDir, sub_dir)\r
+        destination_dir = os.path.dirname(destination_file)\r
+        CreateDirectory(destination_dir)\r
+        try:\r
+            CopyFileOnChange(File, destination_dir)\r
+        except:\r
+            # Best effort: a failed copy only degrades the cache, never the build.\r
+            EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))\r
+            return\r
+\r
    def CopyModuleToCache(self):\r
-        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.Name, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
-        CreateDirectory (FileDir)\r
-        HashFile = path.join(self.BuildDir, self.Name + '.hash')\r
-        if os.path.exists(HashFile):\r
-            CopyFileOnChange(HashFile, FileDir)\r
-        ModuleFile = path.join(self.OutputDir, self.Name + '.inf')\r
-        if os.path.exists(ModuleFile):\r
-            CopyFileOnChange(ModuleFile, FileDir)\r
+        ## Publish this module's build outputs into the binary cache destination.\r
+        # Find the MakeHashStr and PreMakeHashStr from latest MakeHashFileList\r
+        # and PreMakeHashFileList files\r
+        MakeHashStr = None\r
+        PreMakeHashStr = None\r
+        MakeTimeStamp = 0\r
+        PreMakeTimeStamp = 0\r
+        Files = [f for f in os.listdir(LongFilePath(self.BuildDir)) if path.isfile(LongFilePath(path.join(self.BuildDir, f)))]\r
+        for File in Files:\r
+            if ".MakeHashFileList." in File:\r
+                # find latest file through time stamp\r
+                FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]\r
+                if FileTimeStamp > MakeTimeStamp:\r
+                    MakeTimeStamp = FileTimeStamp\r
+                    MakeHashStr = File.split('.')[-1]\r
+                    if len(MakeHashStr) != 32:\r
+                        EdkLogger.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File))\r
+            if ".PreMakeHashFileList." in File:\r
+                FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]\r
+                if FileTimeStamp > PreMakeTimeStamp:\r
+                    PreMakeTimeStamp = FileTimeStamp\r
+                    PreMakeHashStr = File.split('.')[-1]\r
+                    if len(PreMakeHashStr) != 32:\r
+                        EdkLogger.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File))\r
+\r
+        if not MakeHashStr:\r
+            EdkLogger.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))\r
+            return\r
+        if not PreMakeHashStr:\r
+            EdkLogger.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))\r
+            return\r
+\r
+        # Create Cache destination dirs\r
+        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+        FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
+        CacheFileDir = path.join(FileDir, MakeHashStr)\r
+        CacheFfsDir = path.join(FfsDir, MakeHashStr)\r
+        CreateDirectory (CacheFileDir)\r
+        CreateDirectory (CacheFfsDir)\r
+\r
+        # Create ModuleHashPair file to support multiple version cache together\r
+        ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
+        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
+        if os.path.exists(ModuleHashPair):\r
+            with open(ModuleHashPair, 'r') as f:\r
+                ModuleHashPairList = json.load(f)\r
+        if not (PreMakeHashStr, MakeHashStr) in set(map(tuple, ModuleHashPairList)):\r
+            ModuleHashPairList.insert(0, (PreMakeHashStr, MakeHashStr))\r
+            with open(ModuleHashPair, 'w') as f:\r
+                json.dump(ModuleHashPairList, f, indent=2)\r
+\r
+        # Copy files to Cache destination dirs\r
        if not self.OutputFile:\r
            Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
            self.OutputFile = Ma.Binaries\r
        for File in self.OutputFile:\r
-            File = str(File)\r
-            if not os.path.isabs(File):\r
-                File = os.path.join(self.OutputDir, File)\r
-            if os.path.exists(File):\r
-                sub_dir = os.path.relpath(File, self.OutputDir)\r
-                destination_file = os.path.join(FileDir, sub_dir)\r
-                destination_dir = os.path.dirname(destination_file)\r
-                CreateDirectory(destination_dir)\r
-                CopyFileOnChange(File, destination_dir)\r
-\r
-    def AttemptModuleCacheCopy(self):\r
-        # If library or Module is binary do not skip by hash\r
-        if self.IsBinaryModule:\r
-            return False\r
-        # .inc is contains binary information so do not skip by hash as well\r
-        for f_ext in self.SourceFileList:\r
-            if '.inc' in str(f_ext):\r
-                return False\r
-        FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.Name, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
-        HashFile = path.join(FileDir, self.Name + '.hash')\r
-        if os.path.exists(HashFile):\r
-            f = open(HashFile, 'r')\r
-            CacheHash = f.read()\r
-            f.close()\r
-            self.GenModuleHash()\r
-            if GlobalData.gModuleHash[self.Arch][self.Name]:\r
-                if CacheHash == GlobalData.gModuleHash[self.Arch][self.Name]:\r
-                    for root, dir, files in os.walk(FileDir):\r
-                        for f in files:\r
-                            if self.Name + '.hash' in f:\r
-                                CopyFileOnChange(HashFile, self.BuildDir)\r
-                            else:\r
-                                File = path.join(root, f)\r
-                                sub_dir = os.path.relpath(File, FileDir)\r
-                                destination_file = os.path.join(self.OutputDir, sub_dir)\r
-                                destination_dir = os.path.dirname(destination_file)\r
-                                CreateDirectory(destination_dir)\r
-                                CopyFileOnChange(File, destination_dir)\r
-                    if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
-                        CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
-                    return True\r
-        return False\r
-\r
+            # FFS outputs mirror into the Ffs cache dir; hash bookkeeping files\r
+            # stay at the top-level FileDir, everything else under CacheFileDir.\r
+            if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):\r
+                self.CacheCopyFile(CacheFfsDir, self.FfsOutputDir, File)\r
+            else:\r
+                if self.Name + ".autogen.hash." in File or \\r
+                   self.Name + ".autogen.hashchain." in File or \\r
+                   self.Name + ".hash." in File or \\r
+                   self.Name + ".hashchain." in File or \\r
+                   self.Name + ".PreMakeHashFileList." in File or \\r
+                   self.Name + ".MakeHashFileList." in File:\r
+                    self.CacheCopyFile(FileDir, self.BuildDir, File)\r
+                else:\r
+                    self.CacheCopyFile(CacheFileDir, self.BuildDir, File)\r
## Create makefile for the module and its dependent libraries\r
#\r
# @param CreateLibraryMakeFile Flag indicating if or not the makefiles of\r
#\r
@cached_class_function\r
def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r
+\r
# nest this function inside it's only caller.\r
def CreateTimeStamp():\r
FileSet = {self.MetaFile.Path}\r
\r
if os.path.exists (self.TimeStampPath):\r
os.remove (self.TimeStampPath)\r
- with open(self.TimeStampPath, 'w+') as fd:\r
- for f in FileSet:\r
- fd.write(f)\r
- fd.write("\n")\r
+\r
+ SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)\r
\r
# Ignore generating makefile when it is a binary module\r
if self.IsBinaryModule:\r
for LibraryAutoGen in self.LibraryAutoGenList:\r
LibraryAutoGen.CreateMakeFile()\r
\r
- # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping\r
- if not GlobalData.gUseHashCache and self.CanSkip():\r
+ # CanSkip uses timestamps to determine build skipping\r
+ if self.CanSkip():\r
return\r
\r
if len(self.CustomMakefile) == 0:\r
\r
CreateTimeStamp()\r
\r
+ MakefileType = Makefile._FileType\r
+ MakefileName = Makefile._FILE_NAME_[MakefileType]\r
+ MakefilePath = os.path.join(self.MakeFileDir, MakefileName)\r
+ FilePath = path.join(self.BuildDir, self.Name + ".makefile")\r
+ SaveFileOnChange(FilePath, MakefilePath, False)\r
+\r
def CopyBinaryFiles(self):\r
for File in self.Module.Binaries:\r
SrcPath = File.Path\r
# dependent libraries will be created\r
#\r
def CreateCodeFile(self, CreateLibraryCodeFile=True):\r
+\r
if self.IsCodeFileCreated:\r
return\r
\r
if not self.IsLibrary and CreateLibraryCodeFile:\r
for LibraryAutoGen in self.LibraryAutoGenList:\r
LibraryAutoGen.CreateCodeFile()\r
- # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping\r
- if not GlobalData.gUseHashCache and self.CanSkip():\r
- return\r
\r
+ self.LibraryAutoGenList\r
AutoGenList = []\r
IgoredAutoGenList = []\r
\r
(" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))\r
\r
self.IsCodeFileCreated = True\r
+\r
return AutoGenList\r
\r
## Summarize the ModuleAutoGen objects of all libraries used by this module\r
self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)\r
return RetVal\r
\r
+    def GenCMakeHash(self):\r
+        # GenCMakeHash can only be called in --binary-destination\r
+        # Never called in multiprocessing and always directly save result in main process,\r
+        # so no need remote dict to share the gCMakeHashFile result with main process\r
+        #\r
+        # Hash the module's AutoGen files plus its recorded makefile path and\r
+        # save the per-file hash list as <Name>.autogen.hashchain.<md5>.\r
+\r
+        DependencyFileSet = set()\r
+        # Add AutoGen files\r
+        if self.AutoGenFileList:\r
+            for File in set(self.AutoGenFileList):\r
+                DependencyFileSet.add(File)\r
+\r
+        # Add Makefile\r
+        abspath = path.join(self.BuildDir, self.Name + ".makefile")\r
+        # Initialize before the try: EdkLogger.error with RaiseError=False\r
+        # returns, so 'lines' must exist even when the open() fails.\r
+        lines = None\r
+        try:\r
+            with open(LongFilePath(abspath),"r") as fd:\r
+                lines = fd.readlines()\r
+        except Exception as e:\r
+            EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)\r
+        if lines:\r
+            DependencyFileSet.update(lines)\r
+\r
+        # Calculate all above dependency files hash\r
+        # Initialize hash object\r
+        FileList = []\r
+        m = hashlib.md5()\r
+        for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
+            if not path.exists(LongFilePath(str(File))):\r
+                EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
+                continue\r
+            with open(LongFilePath(str(File)), 'rb') as f:\r
+                Content = f.read()\r
+            m.update(Content)\r
+            FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
+\r
+        HashChainFile = path.join(self.BuildDir, self.Name + ".autogen.hashchain." + m.hexdigest())\r
+        GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+        try:\r
+            with open(LongFilePath(HashChainFile), 'w') as f:\r
+                json.dump(FileList, f, indent=2)\r
+        except Exception:\r
+            EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)\r
+            return False\r
+\r
    def GenModuleHash(self):\r
-        # Initialize a dictionary for each arch type\r
-        if self.Arch not in GlobalData.gModuleHash:\r
-            GlobalData.gModuleHash[self.Arch] = {}\r
+        # GenModuleHash only called after autogen phase\r
+        # Never called in multiprocessing and always directly save result in main process,\r
+        # so no need remote dict to share the gModuleHashFile result with main process\r
+        #\r
+        # GenPreMakefileHashList consume no dict.\r
+        # GenPreMakefileHashList produce local gModuleHashFile dict.\r
+\r
+        DependencyFileSet = set()\r
+        # Add Module Meta file\r
+        DependencyFileSet.add(self.MetaFile.Path)\r
\r
-        # Early exit if module or library has been hashed and is in memory\r
-        if self.Name in GlobalData.gModuleHash[self.Arch]:\r
-            return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
+        # Add Module's source files\r
+        if self.SourceFileList:\r
+            for File in set(self.SourceFileList):\r
+                DependencyFileSet.add(File.Path)\r
+\r
+        # Add modules's include header files\r
+        # Directly use the deps.txt file in the module BuildDir\r
+        # NOTE(review): deps.txt is presumed to be produced by an earlier build\r
+        # step (compiler dependency output) — verify it exists at this point.\r
+        abspath = path.join(self.BuildDir, "deps.txt")\r
+        rt = None\r
+        try:\r
+            with open(LongFilePath(abspath),"r") as fd:\r
+                lines = fd.readlines()\r
+                if lines:\r
+                    rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])\r
+        except Exception as e:\r
+            EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)\r
\r
+        if rt:\r
+            DependencyFileSet.update(rt)\r
+\r
+\r
+        # Calculate all above dependency files hash\r
        # Initialze hash object\r
+        FileList = []\r
        m = hashlib.md5()\r
+        BuildDirStr = path.abspath(self.BuildDir).lower()\r
+        for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
+            # Skip the AutoGen files in BuildDir which already been\r
+            # included in .autogen.hash. file\r
+            if BuildDirStr in path.abspath(File).lower():\r
+                continue\r
+            if not path.exists(LongFilePath(File)):\r
+                EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
+                continue\r
+            with open(LongFilePath(File), 'rb') as f:\r
+                Content = f.read()\r
+            m.update(Content)\r
+            FileList.append((File, hashlib.md5(Content).hexdigest()))\r
+\r
+        HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())\r
+        GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+        try:\r
+            with open(LongFilePath(HashChainFile), 'w') as f:\r
+                json.dump(FileList, f, indent=2)\r
+        except:\r
+            EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)\r
+            return False\r
\r
+    def GenPreMakefileHashList(self):\r
+        # GenPreMakefileHashList consume below dicts:\r
+        #     gPlatformHashFile\r
+        #     gPackageHashFile\r
+        #     gModuleHashFile\r
+        # GenPreMakefileHashList produce no dict.\r
+        # gModuleHashFile items might be produced in multiprocessing, so\r
+        # need check gModuleHashFile remote dict\r
+\r
+        # skip binary module\r
+        if self.IsBinaryModule:\r
+            return\r
+\r
+        FileList = []\r
+        m = hashlib.md5()\r
        # Add Platform level hash\r
-        m.update(GlobalData.gPlatformHash.encode('utf-8'))\r
+        # The hash-file *path* embeds its content digest, so hashing the path\r
+        # is sufficient to detect content changes.\r
+        HashFile = GlobalData.gPlatformHashFile\r
+        if path.exists(LongFilePath(HashFile)):\r
+            FileList.append(HashFile)\r
+            m.update(HashFile.encode('utf-8'))\r
+        else:\r
+            EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)\r
\r
        # Add Package level hash\r
        if self.DependentPackageList:\r
            for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
-                if Pkg.PackageName in GlobalData.gPackageHash:\r
-                    m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))\r
+                if not (Pkg.PackageName, Pkg.Arch) in GlobalData.gPackageHashFile:\r
+                    EdkLogger.quiet("[cache warning]:No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))\r
+                    continue\r
+                HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]\r
+                if path.exists(LongFilePath(HashFile)):\r
+                    FileList.append(HashFile)\r
+                    m.update(HashFile.encode('utf-8'))\r
+                else:\r
+                    EdkLogger.quiet("[cache warning]:No Package HashFile: %s" % HashFile)\r
+\r
+        # Add Module self\r
+        # GenPreMakefileHashList needed in both --binary-destination\r
+        # and --hash. And --hash might save ModuleHashFile in remote dict\r
+        # during multiprocessing.\r
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:\r
+            HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]\r
+            if path.exists(LongFilePath(HashFile)):\r
+                FileList.append(HashFile)\r
+                m.update(HashFile.encode('utf-8'))\r
+            else:\r
+                EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)\r
+        else:\r
+            # Nest the success path under the key check so a missing entry\r
+            # cannot fall through and reuse the stale HashFile bound above.\r
+            EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
\r
        # Add Library hash\r
        if self.LibraryAutoGenList:\r
-            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
-                if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r
-                    Lib.GenModuleHash()\r
-                m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
+            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):\r
+\r
+                if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:\r
+                    HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]\r
+                    if path.exists(LongFilePath(HashFile)):\r
+                        FileList.append(HashFile)\r
+                        m.update(HashFile.encode('utf-8'))\r
+                    else:\r
+                        EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)\r
+                else:\r
+                    # Skip this library rather than hashing a stale HashFile.\r
+                    EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))\r
+\r
+        # Save PreMakeHashFileList\r
+        FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())\r
+        try:\r
+            with open(LongFilePath(FilePath), 'w') as f:\r
+                json.dump(FileList, f, indent=0)\r
+        except Exception:\r
+            EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)\r
+\r
+    def GenMakefileHashList(self):\r
+        # GenMakefileHashList only need in --binary-destination which will\r
+        # everything in local dict. So don't need check remote dict.\r
+\r
+        # skip binary module\r
+        if self.IsBinaryModule:\r
+            return\r
+\r
+        FileList = []\r
+        m = hashlib.md5()\r
+        # Add AutoGen hash\r
+        # Only reached in --binary-destination, where GenCMakeHash has always\r
+        # populated this entry first.\r
+        HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]\r
+        if path.exists(LongFilePath(HashFile)):\r
+            FileList.append(HashFile)\r
+            m.update(HashFile.encode('utf-8'))\r
+        else:\r
+            EdkLogger.quiet("[cache warning]:No AutoGen HashFile: %s" % HashFile)\r
\r
        # Add Module self\r
-        f = open(str(self.MetaFile), 'rb')\r
-        Content = f.read()\r
-        f.close()\r
-        m.update(Content)\r
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:\r
+            HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]\r
+            if path.exists(LongFilePath(HashFile)):\r
+                FileList.append(HashFile)\r
+                m.update(HashFile.encode('utf-8'))\r
+            else:\r
+                EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)\r
+        else:\r
+            # Nest the success path under the key check so a missing entry\r
+            # cannot fall through and reuse the stale HashFile bound above.\r
+            EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
\r
-        # Add Module's source files\r
-        if self.SourceFileList:\r
-            for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r
-                f = open(str(File), 'rb')\r
-                Content = f.read()\r
-                f.close()\r
-                m.update(Content)\r
+        # Add Library hash\r
+        if self.LibraryAutoGenList:\r
+            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):\r
+                if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:\r
+                    HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]\r
+                    if path.exists(LongFilePath(HashFile)):\r
+                        FileList.append(HashFile)\r
+                        m.update(HashFile.encode('utf-8'))\r
+                    else:\r
+                        EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)\r
+                else:\r
+                    # Skip this library rather than hashing a stale HashFile.\r
+                    EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))\r
+\r
+        # Save MakeHashFileList\r
+        FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())\r
+        try:\r
+            with open(LongFilePath(FilePath), 'w') as f:\r
+                json.dump(FileList, f, indent=0)\r
+        except Exception:\r
+            EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)\r
+\r
+    def CheckHashChainFile(self, HashChainFile):\r
+        # Assume the HashChainFile basename format is the 'x.hashchain.16BytesHexStr'\r
+        # The x is module name and the 16BytesHexStr is md5 hexdigest of\r
+        # all hashchain files content\r
+        HashStr = HashChainFile.split('.')[-1]\r
+        if len(HashStr) != 32:\r
+            # Was "% (File)": 'File' is undefined here and raised a NameError\r
+            # on this error path; log the actual argument instead.\r
+            EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % (HashChainFile))\r
+            return False\r
+\r
+        try:\r
+            with open(LongFilePath(HashChainFile), 'r') as f:\r
+                HashChainList = json.load(f)\r
+        except Exception:\r
+            EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)\r
+            return False\r
\r
-        GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()\r
+        # Verify each recorded (file, md5) pair against the current file\r
+        # content, memoizing per-file digests in gFileHashDict.\r
+        for SrcFile, SrcHash in HashChainList:\r
+            if SrcFile in GlobalData.gFileHashDict:\r
+                DestHash = GlobalData.gFileHashDict[SrcFile]\r
+            else:\r
+                try:\r
+                    with open(LongFilePath(SrcFile), 'rb') as f:\r
+                        Content = f.read()\r
+                        DestHash = hashlib.md5(Content).hexdigest()\r
+                        GlobalData.gFileHashDict[SrcFile] = DestHash\r
+                except IOError:\r
+                    # cache miss if SrcFile is removed in new version code\r
+                    GlobalData.gFileHashDict[SrcFile] = 0\r
+                    EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))\r
+                    return False\r
+            if SrcHash != DestHash:\r
+                EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))\r
+                return False\r
\r
-        return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
+        return True\r
\r
- ## Decide whether we can skip the ModuleAutoGen process\r
- def CanSkipbyHash(self):\r
- # Hashing feature is off\r
- if not GlobalData.gUseHashCache:\r
+ ## Decide whether we can skip the left autogen and make process\r
+ def CanSkipbyMakeCache(self):\r
+ # For --binary-source only\r
+ # CanSkipbyMakeCache consume below dicts:\r
+ # gModuleMakeCacheStatus\r
+ # gHashChainStatus\r
+ # GenPreMakefileHashList produce gModuleMakeCacheStatus, gModuleHashFile dict.\r
+ # all these dicts might be produced in multiprocessing, so\r
+ # need check these remote dict\r
+\r
+ if not GlobalData.gBinCacheSource:\r
return False\r
\r
- # Initialize a dictionary for each arch type\r
- if self.Arch not in GlobalData.gBuildHashSkipTracking:\r
- GlobalData.gBuildHashSkipTracking[self.Arch] = dict()\r
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:\r
+ return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]\r
\r
- # If library or Module is binary do not skip by hash\r
+ # If Module is binary, which has special build rule, do not skip by cache.\r
if self.IsBinaryModule:\r
+ print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
return False\r
\r
- # .inc is contains binary information so do not skip by hash as well\r
+ # see .inc as binary file, do not skip by hash\r
for f_ext in self.SourceFileList:\r
if '.inc' in str(f_ext):\r
+ print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
return False\r
\r
- # Use Cache, if exists and if Module has a copy in cache\r
- if GlobalData.gBinCacheSource and self.AttemptModuleCacheCopy():\r
+ ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
+\r
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
+ ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")\r
+ try:\r
+ with open(LongFilePath(ModuleHashPair), 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
+ except:\r
+ # The ModuleHashPair file might not exist for a newly added module\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
+ print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)\r
+ return False\r
+\r
+ # Check the MakeHash entries in ModuleHashPairList one by one\r
+ for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
+ SourceHashDir = path.join(ModuleCacheDir, MakeHash)\r
+ SourceFfsHashDir = path.join(FfsDir, MakeHash)\r
+ PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)\r
+ MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)\r
+\r
+ try:\r
+ with open(LongFilePath(MakeHashFileList_FilePah), 'r') as f:\r
+ MakeHashFileList = json.load(f)\r
+ except:\r
+ EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePah)\r
+ continue\r
+\r
+ HashMiss = False\r
+ for HashChainFile in MakeHashFileList:\r
+ HashChainStatus = None\r
+ if HashChainFile in GlobalData.gHashChainStatus:\r
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
+ if HashChainStatus == False:\r
+ HashMiss = True\r
+ break\r
+ elif HashChainStatus == True:\r
+ continue\r
+ # Convert to path start with cache source dir\r
+ RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)\r
+ NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)\r
+ if self.CheckHashChainFile(NewFilePath):\r
+ GlobalData.gHashChainStatus[HashChainFile] = True\r
+ # Save the module self HashFile for GenPreMakefileHashList later usage\r
+ if self.Name + ".hashchain." in HashChainFile:\r
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+ else:\r
+ GlobalData.gHashChainStatus[HashChainFile] = False\r
+ HashMiss = True\r
+ break\r
+\r
+ if HashMiss:\r
+ continue\r
+\r
+ # Make cache hit, restore the module build result\r
+ for root, dir, files in os.walk(SourceHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.BuildDir, SourceHashDir, File)\r
+ if os.path.exists(SourceFfsHashDir):\r
+ for root, dir, files in os.walk(SourceFfsHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)\r
+\r
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+\r
+ print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
return True\r
\r
- # Early exit for libraries that haven't yet finished building\r
- HashFile = path.join(self.BuildDir, self.Name + ".hash")\r
- if self.IsLibrary and not os.path.exists(HashFile):\r
+ print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
+\r
+ ## Decide whether we can skip the left autogen and make process\r
+ def CanSkipbyPreMakeCache(self):\r
+ # CanSkipbyPreMakeCache consumes the dicts below:\r
+ # gModulePreMakeCacheStatus\r
+ # gHashChainStatus\r
+ # gModuleHashFile\r
+ # GenPreMakefileHashList produces the gModulePreMakeCacheStatus dict.\r
+ # All of these dicts might be produced in multiprocessing, so we\r
+ # need to check these remote dicts.\r
+\r
+ if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:\r
return False\r
\r
- # Return a Boolean based on if can skip by hash, either from memory or from IO.\r
- if self.Name not in GlobalData.gBuildHashSkipTracking[self.Arch]:\r
- # If hashes are the same, SaveFileOnChange() will return False.\r
- GlobalData.gBuildHashSkipTracking[self.Arch][self.Name] = not SaveFileOnChange(HashFile, self.GenModuleHash(), True)\r
- return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]\r
- else:\r
- return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]\r
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:\r
+ return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]\r
+\r
+ # If Module is binary, which has special build rule, do not skip by cache.\r
+ if self.IsBinaryModule:\r
+ print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
+\r
+ # Treat a .inc file as binary, do not skip by hash\r
+ for f_ext in self.SourceFileList:\r
+ if '.inc' in str(f_ext):\r
+ print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
+\r
+ # For --hash only in the incremental build\r
+ if not GlobalData.gBinCacheSource:\r
+ Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]\r
+ PreMakeHashFileList_FilePah = None\r
+ MakeTimeStamp = 0\r
+ # Find latest PreMakeHashFileList file in self.BuildDir folder\r
+ for File in Files:\r
+ if ".PreMakeHashFileList." in File:\r
+ FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]\r
+ if FileTimeStamp > MakeTimeStamp:\r
+ MakeTimeStamp = FileTimeStamp\r
+ PreMakeHashFileList_FilePah = File\r
+ if not PreMakeHashFileList_FilePah:\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
+\r
+ try:\r
+ with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:\r
+ PreMakeHashFileList = json.load(f)\r
+ except:\r
+ EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)\r
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
+\r
+ HashMiss = False\r
+ for HashChainFile in PreMakeHashFileList:\r
+ HashChainStatus = None\r
+ if HashChainFile in GlobalData.gHashChainStatus:\r
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
+ if HashChainStatus == False:\r
+ HashMiss = True\r
+ break\r
+ elif HashChainStatus == True:\r
+ continue\r
+ if self.CheckHashChainFile(HashChainFile):\r
+ GlobalData.gHashChainStatus[HashChainFile] = True\r
+ # Save the module self HashFile for GenPreMakefileHashList later usage\r
+ if self.Name + ".hashchain." in HashChainFile:\r
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+ else:\r
+ GlobalData.gHashChainStatus[HashChainFile] = False\r
+ HashMiss = True\r
+ break\r
+\r
+ if HashMiss:\r
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
+ else:\r
+ print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
+ return True\r
+\r
+ ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
+\r
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
+ ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")\r
+ try:\r
+ with open(LongFilePath(ModuleHashPair), 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
+ except:\r
+ # The ModuleHashPair file might not exist for a newly added module\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ return False\r
+\r
+ # Check the PreMakeHash in ModuleHashPairList one by one\r
+ for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
+ SourceHashDir = path.join(ModuleCacheDir, MakeHash)\r
+ SourceFfsHashDir = path.join(FfsDir, MakeHash)\r
+ PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)\r
+ MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)\r
+\r
+ try:\r
+ with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:\r
+ PreMakeHashFileList = json.load(f)\r
+ except:\r
+ EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)\r
+ continue\r
+\r
+ HashMiss = False\r
+ for HashChainFile in PreMakeHashFileList:\r
+ HashChainStatus = None\r
+ if HashChainFile in GlobalData.gHashChainStatus:\r
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
+ if HashChainStatus == False:\r
+ HashMiss = True\r
+ break\r
+ elif HashChainStatus == True:\r
+ continue\r
+ # Convert to path start with cache source dir\r
+ RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)\r
+ NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)\r
+ if self.CheckHashChainFile(NewFilePath):\r
+ GlobalData.gHashChainStatus[HashChainFile] = True\r
+ else:\r
+ GlobalData.gHashChainStatus[HashChainFile] = False\r
+ HashMiss = True\r
+ break\r
+\r
+ if HashMiss:\r
+ continue\r
+\r
+ # PreMakefile cache hit, restore the module build result\r
+ for root, dir, files in os.walk(SourceHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.BuildDir, SourceHashDir, File)\r
+ if os.path.exists(SourceFfsHashDir):\r
+ for root, dir, files in os.walk(SourceFfsHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)\r
+\r
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+\r
+ print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
+ return True\r
+\r
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+ return False\r
+\r
+ ## Decide whether we can skip the Module build\r
+ def CanSkipbyCache(self, gHitSet):\r
+ # Hashing feature is off\r
+ if not GlobalData.gBinCacheSource:\r
+ return False\r
+\r
+ if self in gHitSet:\r
+ return True\r
+\r
+ return False\r
\r
## Decide whether we can skip the ModuleAutoGen process\r
# If any source file is newer than the module than we cannot skip\r
#\r
def CanSkip(self):\r
+ # Don't skip if cache feature enabled\r
+ if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:\r
+ return False\r
if self.MakeFileDir in GlobalData.gSikpAutoGenCache:\r
return True\r
if not os.path.exists(self.TimeStampPath):\r