self._AutoGenObject.IncludePathList + self._AutoGenObject.BuildOptionIncPathList\r
)\r
\r
+ self.DependencyHeaderFileSet = set()\r
+ if FileDependencyDict:\r
+ for Dependency in FileDependencyDict.values():\r
+ self.DependencyHeaderFileSet.update(set(Dependency))\r
+\r
# Get a set of unique package includes from MetaFile\r
parentMetaFileIncludes = set()\r
for aInclude in self._AutoGenObject.PackageIncludePathList:\r
## For creating makefile targets for dependent libraries\r
def ProcessDependentLibrary(self):\r
for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:\r
- if not LibraryAutoGen.IsBinaryModule and not LibraryAutoGen.CanSkipbyHash():\r
+ if not LibraryAutoGen.IsBinaryModule:\r
self.LibraryBuildDirectoryList.append(self.PlaceMacro(LibraryAutoGen.BuildDir, self.Macros))\r
\r
## Return a list containing source file's dependencies\r
def GetFileDependency(self, FileList, ForceInculeList, SearchPathList):\r
Dependency = {}\r
for F in FileList:\r
- Dependency[F] = self.GetDependencyList(F, ForceInculeList, SearchPathList)\r
+ Dependency[F] = GetDependencyList(self._AutoGenObject, self.FileCache, F, ForceInculeList, SearchPathList)\r
return Dependency\r
\r
- ## Find dependencies for one source file\r
- #\r
- # By searching recursively "#include" directive in file, find out all the\r
- # files needed by given source file. The dependencies will be only searched\r
- # in given search path list.\r
- #\r
- # @param File The source file\r
- # @param ForceInculeList The list of files which will be included forcely\r
- # @param SearchPathList The list of search path\r
- #\r
- # @retval list The list of files the given source file depends on\r
- #\r
- def GetDependencyList(self, File, ForceList, SearchPathList):\r
- EdkLogger.debug(EdkLogger.DEBUG_1, "Try to get dependency files for %s" % File)\r
- FileStack = [File] + ForceList\r
- DependencySet = set()\r
-\r
- if self._AutoGenObject.Arch not in gDependencyDatabase:\r
- gDependencyDatabase[self._AutoGenObject.Arch] = {}\r
- DepDb = gDependencyDatabase[self._AutoGenObject.Arch]\r
-\r
- while len(FileStack) > 0:\r
- F = FileStack.pop()\r
-\r
- FullPathDependList = []\r
- if F in self.FileCache:\r
- for CacheFile in self.FileCache[F]:\r
- FullPathDependList.append(CacheFile)\r
- if CacheFile not in DependencySet:\r
- FileStack.append(CacheFile)\r
- DependencySet.update(FullPathDependList)\r
- continue\r
-\r
- CurrentFileDependencyList = []\r
- if F in DepDb:\r
- CurrentFileDependencyList = DepDb[F]\r
- else:\r
- try:\r
- Fd = open(F.Path, 'rb')\r
- FileContent = Fd.read()\r
- Fd.close()\r
- except BaseException as X:\r
- EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))\r
- if len(FileContent) == 0:\r
- continue\r
- try:\r
- if FileContent[0] == 0xff or FileContent[0] == 0xfe:\r
- FileContent = FileContent.decode('utf-16')\r
- else:\r
- FileContent = FileContent.decode()\r
- except:\r
- # The file is not txt file. for example .mcb file\r
- continue\r
- IncludedFileList = gIncludePattern.findall(FileContent)\r
-\r
- for Inc in IncludedFileList:\r
- Inc = Inc.strip()\r
- # if there's macro used to reference header file, expand it\r
- HeaderList = gMacroPattern.findall(Inc)\r
- if len(HeaderList) == 1 and len(HeaderList[0]) == 2:\r
- HeaderType = HeaderList[0][0]\r
- HeaderKey = HeaderList[0][1]\r
- if HeaderType in gIncludeMacroConversion:\r
- Inc = gIncludeMacroConversion[HeaderType] % {"HeaderKey" : HeaderKey}\r
- else:\r
- # not known macro used in #include, always build the file by\r
- # returning a empty dependency\r
- self.FileCache[File] = []\r
- return []\r
- Inc = os.path.normpath(Inc)\r
- CurrentFileDependencyList.append(Inc)\r
- DepDb[F] = CurrentFileDependencyList\r
-\r
- CurrentFilePath = F.Dir\r
- PathList = [CurrentFilePath] + SearchPathList\r
- for Inc in CurrentFileDependencyList:\r
- for SearchPath in PathList:\r
- FilePath = os.path.join(SearchPath, Inc)\r
- if FilePath in gIsFileMap:\r
- if not gIsFileMap[FilePath]:\r
- continue\r
- # If isfile is called too many times, the performance is slow down.\r
- elif not os.path.isfile(FilePath):\r
- gIsFileMap[FilePath] = False\r
- continue\r
- else:\r
- gIsFileMap[FilePath] = True\r
- FilePath = PathClass(FilePath)\r
- FullPathDependList.append(FilePath)\r
- if FilePath not in DependencySet:\r
- FileStack.append(FilePath)\r
- break\r
- else:\r
- EdkLogger.debug(EdkLogger.DEBUG_9, "%s included by %s was not found "\\r
- "in any given path:\n\t%s" % (Inc, F, "\n\t".join(SearchPathList)))\r
-\r
- self.FileCache[F] = FullPathDependList\r
- DependencySet.update(FullPathDependList)\r
-\r
- DependencySet.update(ForceList)\r
- if File in DependencySet:\r
- DependencySet.remove(File)\r
- DependencyList = list(DependencySet) # remove duplicate ones\r
-\r
- return DependencyList\r
\r
## CustomMakefile class\r
#\r
def GetLibraryBuildDirectoryList(self):\r
DirList = []\r
for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:\r
- if not LibraryAutoGen.IsBinaryModule and not LibraryAutoGen.CanSkipbyHash():\r
+ if not LibraryAutoGen.IsBinaryModule:\r
DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))\r
return DirList\r
\r
def GetLibraryBuildDirectoryList(self):\r
DirList = []\r
for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:\r
- if not LibraryAutoGen.IsBinaryModule and not LibraryAutoGen.CanSkipbyHash():\r
+ if not LibraryAutoGen.IsBinaryModule:\r
DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))\r
return DirList\r
\r
+## Find dependencies for one source file\r
+#\r
+# By searching recursively "#include" directive in file, find out all the\r
+# files needed by given source file. The dependencies will be only searched\r
+# in given search path list.\r
+#\r
+# @param File The source file
+# @param ForceList The list of files which will be forcibly included
+# @param SearchPathList The list of search paths
+#\r
+# @retval list The list of files the given source file depends on\r
+#\r
+def GetDependencyList(AutoGenObject, FileCache, File, ForceList, SearchPathList):\r
+ EdkLogger.debug(EdkLogger.DEBUG_1, "Try to get dependency files for %s" % File)\r
+ FileStack = [File] + ForceList\r
+ DependencySet = set()\r
+\r
+ if AutoGenObject.Arch not in gDependencyDatabase:\r
+ gDependencyDatabase[AutoGenObject.Arch] = {}\r
+ DepDb = gDependencyDatabase[AutoGenObject.Arch]\r
+\r
+ while len(FileStack) > 0:\r
+ F = FileStack.pop()\r
+\r
+ FullPathDependList = []\r
+ if F in FileCache:\r
+ for CacheFile in FileCache[F]:\r
+ FullPathDependList.append(CacheFile)\r
+ if CacheFile not in DependencySet:\r
+ FileStack.append(CacheFile)\r
+ DependencySet.update(FullPathDependList)\r
+ continue\r
+\r
+ CurrentFileDependencyList = []\r
+ if F in DepDb:\r
+ CurrentFileDependencyList = DepDb[F]\r
+ else:\r
+ try:\r
+ Fd = open(F.Path, 'rb')\r
+ FileContent = Fd.read()\r
+ Fd.close()\r
+ except BaseException as X:\r
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))\r
+ if len(FileContent) == 0:\r
+ continue\r
+ try:\r
+ if FileContent[0] == 0xff or FileContent[0] == 0xfe:\r
+ FileContent = FileContent.decode('utf-16')\r
+ else:\r
+ FileContent = FileContent.decode()\r
+ except:\r
+ # The file is not txt file. for example .mcb file\r
+ continue\r
+ IncludedFileList = gIncludePattern.findall(FileContent)\r
+\r
+ for Inc in IncludedFileList:\r
+ Inc = Inc.strip()\r
+ # if there's a macro used to reference a header file, expand it
+ HeaderList = gMacroPattern.findall(Inc)\r
+ if len(HeaderList) == 1 and len(HeaderList[0]) == 2:\r
+ HeaderType = HeaderList[0][0]\r
+ HeaderKey = HeaderList[0][1]\r
+ if HeaderType in gIncludeMacroConversion:\r
+ Inc = gIncludeMacroConversion[HeaderType] % {"HeaderKey" : HeaderKey}\r
+ else:\r
+ # unknown macro used in #include, always build the file by
+ # returning an empty dependency list
+ FileCache[File] = []\r
+ return []\r
+ Inc = os.path.normpath(Inc)\r
+ CurrentFileDependencyList.append(Inc)\r
+ DepDb[F] = CurrentFileDependencyList\r
+\r
+ CurrentFilePath = F.Dir\r
+ PathList = [CurrentFilePath] + SearchPathList\r
+ for Inc in CurrentFileDependencyList:\r
+ for SearchPath in PathList:\r
+ FilePath = os.path.join(SearchPath, Inc)\r
+ if FilePath in gIsFileMap:\r
+ if not gIsFileMap[FilePath]:\r
+ continue\r
+ # If isfile is called too many times, performance slows down.
+ elif not os.path.isfile(FilePath):\r
+ gIsFileMap[FilePath] = False\r
+ continue\r
+ else:\r
+ gIsFileMap[FilePath] = True\r
+ FilePath = PathClass(FilePath)\r
+ FullPathDependList.append(FilePath)\r
+ if FilePath not in DependencySet:\r
+ FileStack.append(FilePath)\r
+ break\r
+ else:\r
+ EdkLogger.debug(EdkLogger.DEBUG_9, "%s included by %s was not found "\\r
+ "in any given path:\n\t%s" % (Inc, F, "\n\t".join(SearchPathList)))\r
+\r
+ FileCache[F] = FullPathDependList\r
+ DependencySet.update(FullPathDependList)\r
+\r
+ DependencySet.update(ForceList)\r
+ if File in DependencySet:\r
+ DependencySet.remove(File)\r
+ DependencyList = list(DependencySet) # remove duplicate ones\r
+\r
+ return DependencyList\r
+\r
# This acts like the main() function for the script, unless it is 'import'ed into another script.\r
if __name__ == '__main__':\r
- pass\r
-\r
+ pass\r
\ No newline at end of file
from .GenPcdDb import CreatePcdDatabaseCode\r
from Common.caching import cached_class_function\r
from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r
+from AutoGen.CacheIR import ModuleBuildCacheIR\r
+import json\r
\r
## Mapping Makefile type\r
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}\r
self.AutoGenDepSet = set()\r
self.ReferenceModules = []\r
self.ConstPcd = {}\r
+ self.Makefile = None\r
+ self.FileDependCache = {}\r
\r
def __init_platform_info__(self):\r
pinfo = self.DataPipe.Get("P_Info")\r
\r
self.IsAsBuiltInfCreated = True\r
\r
+ def CacheCopyFile(self, OriginDir, CopyDir, File):\r
+ sub_dir = os.path.relpath(File, CopyDir)\r
+ destination_file = os.path.join(OriginDir, sub_dir)\r
+ destination_dir = os.path.dirname(destination_file)\r
+ CreateDirectory(destination_dir)\r
+ try:\r
+ CopyFileOnChange(File, destination_dir)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))\r
+ return\r
+\r
def CopyModuleToCache(self):\r
- FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.Name, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ self.GenPreMakefileHash(GlobalData.gCacheIR)\r
+ if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \\r
+ not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
+ EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return False\r
+\r
+ self.GenMakeHash(GlobalData.gCacheIR)\r
+ if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \\r
+ not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashChain or \\r
+ not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
+ EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return False\r
+\r
+ MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)\r
+ FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName, MakeHashStr)\r
+ FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name, MakeHashStr)\r
+\r
CreateDirectory (FileDir)\r
- HashFile = path.join(self.BuildDir, self.Name + '.hash')\r
- if os.path.exists(HashFile):\r
- CopyFileOnChange(HashFile, FileDir)\r
+ self.SaveHashChainFileToCache(GlobalData.gCacheIR)\r
ModuleFile = path.join(self.OutputDir, self.Name + '.inf')\r
if os.path.exists(ModuleFile):\r
CopyFileOnChange(ModuleFile, FileDir)\r
CreateDirectory(destination_dir)\r
CopyFileOnChange(File, destination_dir)\r
\r
- def AttemptModuleCacheCopy(self):\r
- # If library or Module is binary do not skip by hash\r
- if self.IsBinaryModule:\r
+ def SaveHashChainFileToCache(self, gDict):\r
+ if not GlobalData.gBinCacheDest:\r
+ return False\r
+\r
+ self.GenPreMakefileHash(gDict)\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
+ EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return False\r
+\r
+ self.GenMakeHash(gDict)\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
+ EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
return False\r
- # .inc is contains binary information so do not skip by hash as well\r
- for f_ext in self.SourceFileList:\r
- if '.inc' in str(f_ext):\r
- return False\r
- FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.Name, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
- HashFile = path.join(FileDir, self.Name + '.hash')\r
- if os.path.exists(HashFile):\r
- f = open(HashFile, 'r')\r
- CacheHash = f.read()\r
- f.close()\r
- self.GenModuleHash()\r
- if GlobalData.gModuleHash[self.Arch][self.Name]:\r
- if CacheHash == GlobalData.gModuleHash[self.Arch][self.Name]:\r
- for root, dir, files in os.walk(FileDir):\r
- for f in files:\r
- if self.Name + '.hash' in f:\r
- CopyFileOnChange(HashFile, self.BuildDir)\r
- else:\r
- File = path.join(root, f)\r
- sub_dir = os.path.relpath(File, FileDir)\r
- destination_file = os.path.join(self.OutputDir, sub_dir)\r
- destination_dir = os.path.dirname(destination_file)\r
- CreateDirectory(destination_dir)\r
- CopyFileOnChange(File, destination_dir)\r
- if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
- CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
- return True\r
- return False\r
+\r
+ # save the hash chain list as cache file\r
+ MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)\r
+ CacheDestDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ CacheHashDestDir = path.join(CacheDestDir, MakeHashStr)\r
+ ModuleHashPair = path.join(CacheDestDir, self.Name + ".ModuleHashPair")\r
+ MakeHashChain = path.join(CacheHashDestDir, self.Name + ".MakeHashChain")\r
+ ModuleFilesChain = path.join(CacheHashDestDir, self.Name + ".ModuleFilesChain")\r
+\r
+ # save the HashChainDict as json file\r
+ CreateDirectory (CacheDestDir)\r
+ CreateDirectory (CacheHashDestDir)\r
+ try:\r
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
+ if os.path.exists(ModuleHashPair):\r
+ f = open(ModuleHashPair, 'r')\r
+ ModuleHashPairList = json.load(f)\r
+ f.close()\r
+ PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
+ MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
+ ModuleHashPairList.append((PreMakeHash, MakeHash))\r
+ ModuleHashPairList = list(set(map(tuple, ModuleHashPairList)))\r
+ with open(ModuleHashPair, 'w') as f:\r
+ json.dump(ModuleHashPairList, f, indent=2)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair)\r
+ return False\r
+\r
+ try:\r
+ with open(MakeHashChain, 'w') as f:\r
+ json.dump(gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain, f, indent=2)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain)\r
+ return False\r
+\r
+ try:\r
+ with open(ModuleFilesChain, 'w') as f:\r
+ json.dump(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain, f, indent=2)\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain)\r
+ return False\r
+\r
+ # save the autogenfile and makefile for debug usage\r
+ CacheDebugDir = path.join(CacheHashDestDir, "CacheDebug")\r
+ CreateDirectory (CacheDebugDir)\r
+ CopyFileOnChange(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath, CacheDebugDir)\r
+ if gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
+ for File in gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
+ CopyFileOnChange(str(File), CacheDebugDir)\r
+\r
+ return True\r
\r
## Create makefile for the module and its dependent libraries\r
#\r
#\r
@cached_class_function\r
def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r
+ gDict = GlobalData.gCacheIR\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
+ return\r
+\r
# nest this function inside it's only caller.\r
def CreateTimeStamp():\r
FileSet = {self.MetaFile.Path}\r
for LibraryAutoGen in self.LibraryAutoGenList:\r
LibraryAutoGen.CreateMakeFile()\r
\r
- # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping\r
- if not GlobalData.gUseHashCache and self.CanSkip():\r
+ # CanSkip uses timestamps to determine build skipping\r
+ if self.CanSkip():\r
return\r
\r
if len(self.CustomMakefile) == 0:\r
\r
CreateTimeStamp()\r
\r
+ MakefileType = Makefile._FileType\r
+ MakefileName = Makefile._FILE_NAME_[MakefileType]\r
+ MakefilePath = os.path.join(self.MakeFileDir, MakefileName)\r
+\r
+ MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
+ MewIR.MakefilePath = MakefilePath\r
+ MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
+ MewIR.CreateMakeFileDone = True\r
+ with GlobalData.file_lock:\r
+ try:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.MakefilePath = MakefilePath\r
+ IR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
+ IR.CreateMakeFileDone = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ except:\r
+ gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
+\r
def CopyBinaryFiles(self):\r
for File in self.Module.Binaries:\r
SrcPath = File.Path\r
# dependent libraries will be created\r
#\r
def CreateCodeFile(self, CreateLibraryCodeFile=True):\r
+ gDict = GlobalData.gCacheIR\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:\r
+ return\r
+\r
if self.IsCodeFileCreated:\r
return\r
\r
if not self.IsLibrary and CreateLibraryCodeFile:\r
for LibraryAutoGen in self.LibraryAutoGenList:\r
LibraryAutoGen.CreateCodeFile()\r
- # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping\r
- if not GlobalData.gUseHashCache and self.CanSkip():\r
+\r
+ # CanSkip uses timestamps to determine build skipping\r
+ if self.CanSkip():\r
return\r
\r
AutoGenList = []\r
(" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))\r
\r
self.IsCodeFileCreated = True\r
+ MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
+ MewIR.CreateCodeFileDone = True\r
+ with GlobalData.file_lock:\r
+ try:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.CreateCodeFileDone = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ except:\r
+ gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
+\r
return AutoGenList\r
\r
## Summarize the ModuleAutoGen objects of all libraries used by this module\r
\r
return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
\r
+ def GenModuleFilesHash(self, gDict):\r
+ # Early exit if module or library has been hashed and is in memory\r
+ if (self.MetaFile.Path, self.Arch) in gDict:\r
+ if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ DependencyFileSet = set()\r
+ # Add Module Meta file\r
+ DependencyFileSet.add(self.MetaFile)\r
+\r
+ # Add Module's source files\r
+ if self.SourceFileList:\r
+ for File in set(self.SourceFileList):\r
+ DependencyFileSet.add(File)\r
+\r
+ # Add the module's included header files
+ # Search dependency file list for each source file\r
+ SourceFileList = []\r
+ OutPutFileList = []\r
+ for Target in self.IntroTargetList:\r
+ SourceFileList.extend(Target.Inputs)\r
+ OutPutFileList.extend(Target.Outputs)\r
+ if OutPutFileList:\r
+ for Item in OutPutFileList:\r
+ if Item in SourceFileList:\r
+ SourceFileList.remove(Item)\r
+ SearchList = []\r
+ for file_path in self.IncludePathList + self.BuildOptionIncPathList:\r
+ # skip the folders in the platform BuildDir which have not been generated yet
+ if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):\r
+ continue\r
+ SearchList.append(file_path)\r
+ FileDependencyDict = {}\r
+ ForceIncludedFile = []\r
+ for F in SourceFileList:\r
+ # skip the files which have not been generated yet, because
+ # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c
+ if not os.path.exists(F.Path):\r
+ continue\r
+ FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)\r
+\r
+ if FileDependencyDict:\r
+ for Dependency in FileDependencyDict.values():\r
+ DependencyFileSet.update(set(Dependency))\r
+\r
+ # Calculate the hash of all the dependency files above
+ # Initialize hash object
+ FileList = []\r
+ m = hashlib.md5()\r
+ for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
+ if not os.path.exists(str(File)):\r
+ EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
+ continue\r
+ f = open(str(File), 'rb')\r
+ Content = f.read()\r
+ f.close()\r
+ m.update(Content)\r
+ FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
+\r
+\r
+ MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
+ MewIR.ModuleFilesHashDigest = m.digest()\r
+ MewIR.ModuleFilesHashHexDigest = m.hexdigest()\r
+ MewIR.ModuleFilesChain = FileList\r
+ with GlobalData.file_lock:\r
+ try:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.ModuleFilesHashDigest = m.digest()\r
+ IR.ModuleFilesHashHexDigest = m.hexdigest()\r
+ IR.ModuleFilesChain = FileList\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ except:\r
+ gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
+\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ def GenPreMakefileHash(self, gDict):\r
+ # Early exit if module or library has been hashed and is in memory\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ # skip binary module\r
+ if self.IsBinaryModule:\r
+ return\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
+ self.GenModuleFilesHash(gDict)\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
+ EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+ return\r
+\r
+ # Initialize hash object
+ m = hashlib.md5()\r
+\r
+ # Add Platform level hash\r
+ if ('PlatformHash') in gDict:\r
+ m.update(gDict[('PlatformHash')].encode('utf-8'))\r
+ else:\r
+ EdkLogger.quiet("[cache warning]: PlatformHash is missing")\r
+\r
+ # Add Package level hash\r
+ if self.DependentPackageList:\r
+ for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
+ if (Pkg.PackageName, 'PackageHash') in gDict:\r
+ m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))\r
+ else:\r
+ EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg.PackageName, self.MetaFile.Name, self.Arch))\r
+\r
+ # Add Library hash\r
+ if self.LibraryAutoGenList:\r
+ for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
+ if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \\r
+ not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:\r
+ Lib.GenPreMakefileHash(gDict)\r
+ m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)\r
+\r
+ # Add Module self\r
+ m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)\r
+\r
+ with GlobalData.file_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.PreMakefileHashHexDigest = m.hexdigest()\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ def GenMakeHeaderFilesHash(self, gDict):\r
+ # Early exit if module or library has been hashed and is in memory\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ # skip binary module\r
+ if self.IsBinaryModule:\r
+ return\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:\r
+ if self.IsLibrary:\r
+ if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.libConstPcd:\r
+ self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
+ if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.Refes:\r
+ self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
+ self.CreateCodeFile()\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
+ self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.File, self.Arch),[]))\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
+ EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+ return\r
+\r
+ DependencyFileSet = set()\r
+ # Add Makefile\r
+ if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:\r
+ DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)\r
+ else:\r
+ EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+\r
+ # Add header files\r
+ if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
+ for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
+ DependencyFileSet.add(File)\r
+ else:\r
+ EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+\r
+ # Add AutoGen files\r
+ if self.AutoGenFileList:\r
+ for File in set(self.AutoGenFileList):\r
+ DependencyFileSet.add(File)\r
+\r
+ # Calculate the hash of all the dependency files above
+ # Initialize hash object
+ FileList = []\r
+ m = hashlib.md5()\r
+ for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
+ if not os.path.exists(str(File)):\r
+ EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
+ continue\r
+ f = open(str(File), 'rb')\r
+ Content = f.read()\r
+ f.close()\r
+ m.update(Content)\r
+ FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
+\r
+ with GlobalData.file_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.AutoGenFileList = self.AutoGenFileList.keys()\r
+ IR.MakeHeaderFilesHashChain = FileList\r
+ IR.MakeHeaderFilesHashDigest = m.digest()\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ def GenMakeHash(self, gDict):\r
+ # Early exit if module or library has been hashed and is in memory\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ # skip binary module\r
+ if self.IsBinaryModule:\r
+ return\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
+ self.GenModuleFilesHash(gDict)\r
+ if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
+ self.GenMakeHeaderFilesHash(gDict)\r
+\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:\r
+ EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+ return\r
+\r
+ # Initialize hash object
+ m = hashlib.md5()\r
+ MakeHashChain = []\r
+\r
+ # Add hash of makefile and dependency header files\r
+ m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)\r
+ New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))\r
+ New.sort(key=lambda x: str(x))\r
+ MakeHashChain += New\r
+\r
+ # Add Library hash\r
+ if self.LibraryAutoGenList:\r
+ for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
+ if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \\r
+ not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:\r
+ Lib.GenMakeHash(gDict)\r
+ if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:\r
+ print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)\r
+ continue\r
+ m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)\r
+ New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))\r
+ New.sort(key=lambda x: str(x))\r
+ MakeHashChain += New\r
+\r
+ # Add Module self\r
+ m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)\r
+ New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))\r
+ New.sort(key=lambda x: str(x))\r
+ MakeHashChain += New\r
+\r
+ with GlobalData.file_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.MakeHashDigest = m.digest()\r
+ IR.MakeHashHexDigest = m.hexdigest()\r
+ IR.MakeHashChain = MakeHashChain\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+\r
+ return gDict[(self.MetaFile.Path, self.Arch)]\r
+\r
+ ## Decide whether we can skip the left autogen and make process\r
+ def CanSkipbyPreMakefileCache(self, gDict):\r
+ if not GlobalData.gBinCacheSource:\r
+ return False\r
+\r
+ # If Module is binary, do not skip by cache\r
+ if self.IsBinaryModule:\r
+ return False\r
+\r
+ # .inc files contain binary information, so do not skip by hash either
+ for f_ext in self.SourceFileList:\r
+ if '.inc' in str(f_ext):\r
+ return False\r
+\r
+ # Get the module hash values from the stored cache and the current build
+ # then check whether cache hit based on the hash values\r
+ # if cache hit, restore all the files from cache\r
+ FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
+\r
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
+ ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
+ if not os.path.exists(ModuleHashPair):\r
+ EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
+ return False\r
+\r
+ try:\r
+ f = open(ModuleHashPair, 'r')\r
+ ModuleHashPairList = json.load(f)\r
+ f.close()\r
+ except:\r
+ EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
+ return False\r
+\r
+ self.GenPreMakefileHash(gDict)\r
+ if not (self.MetaFile.Path, self.Arch) in gDict or \\r
+ not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
+ EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+ return False\r
+\r
+ MakeHashStr = None\r
+ CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
+ for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
+ if PreMakefileHash == CurrentPreMakeHash:\r
+ MakeHashStr = str(MakeHash)\r
+\r
+ if not MakeHashStr:\r
+ return False\r
+\r
+ TargetHashDir = path.join(FileDir, MakeHashStr)\r
+ TargetFfsHashDir = path.join(FfsDir, MakeHashStr)\r
+\r
+ if not os.path.exists(TargetHashDir):\r
+ EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)\r
+ return False\r
+\r
+ for root, dir, files in os.walk(TargetHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.OutputDir, TargetHashDir, File)\r
+ if os.path.exists(TargetFfsHashDir):\r
+ for root, dir, files in os.walk(TargetFfsHashDir):\r
+ for f in files:\r
+ File = path.join(root, f)\r
+ self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)\r
+\r
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+\r
+ with GlobalData.file_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.PreMakeCacheHit = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+ print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)\r
+ #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+ return True\r
+\r
+    ## Decide whether we can skip the make process
+    #
+    # Returns True (and restores outputs from the binary cache) when the
+    # stored MakeHash for this module matches the current build.
+    def CanSkipbyMakeCache(self, gDict):
+        if not GlobalData.gBinCacheSource:
+            return False
+
+        # If Module is binary, do not skip by cache
+        if self.IsBinaryModule:
+            print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)
+            return False
+
+        # .inc contains binary information so do not skip by hash as well
+        for f_ext in self.SourceFileList:
+            if '.inc' in str(f_ext):
+                with GlobalData.file_lock:
+                    IR = gDict[(self.MetaFile.Path, self.Arch)]
+                    IR.MakeCacheHit = False
+                    gDict[(self.MetaFile.Path, self.Arch)] = IR
+                print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)
+                return False
+
+        # Get the module hash values from stored cache and current build
+        # then check whether cache hit based on the hash values
+        # if cache hit, restore all the files from cache
+        FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
+        FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
+
+        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
+        ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
+        if not os.path.exists(ModuleHashPair):
+            EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
+            return False
+
+        try:
+            f = open(ModuleHashPair, 'r')
+            ModuleHashPairList = json.load(f)
+            f.close()
+        except Exception:
+            EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
+            return False
+
+        self.GenMakeHash(gDict)
+        if (self.MetaFile.Path, self.Arch) not in gDict or \
+           not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
+            EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
+            return False
+
+        MakeHashStr = None
+        CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
+        for PreMakefileHash, MakeHash in ModuleHashPairList:
+            if MakeHash == CurrentMakeHash:
+                MakeHashStr = str(MakeHash)
+
+        if not MakeHashStr:
+            print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
+            return False
+
+        TargetHashDir = path.join(FileDir, MakeHashStr)
+        TargetFfsHashDir = path.join(FfsDir, MakeHashStr)
+        if not os.path.exists(TargetHashDir):
+            EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
+            return False
+
+        for root, _, files in os.walk(TargetHashDir):
+            for f in files:
+                File = path.join(root, f)
+                self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
+
+        if os.path.exists(TargetFfsHashDir):
+            for root, _, files in os.walk(TargetFfsHashDir):
+                for f in files:
+                    File = path.join(root, f)
+                    self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)
+
+        if self.Name == "PcdPeim" or self.Name == "PcdDxe":
+            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
+        with GlobalData.file_lock:
+            IR = gDict[(self.MetaFile.Path, self.Arch)]
+            IR.MakeCacheHit = True
+            gDict[(self.MetaFile.Path, self.Arch)] = IR
+        print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
+        return True
+\r
## Decide whether we can skip the ModuleAutoGen process\r
- def CanSkipbyHash(self):\r
+ def CanSkipbyCache(self, gDict):\r
# Hashing feature is off\r
- if not GlobalData.gUseHashCache:\r
+ if not GlobalData.gBinCacheSource:\r
return False\r
\r
- # Initialize a dictionary for each arch type\r
- if self.Arch not in GlobalData.gBuildHashSkipTracking:\r
- GlobalData.gBuildHashSkipTracking[self.Arch] = dict()\r
+ if self in GlobalData.gBuildHashSkipTracking:\r
+ return GlobalData.gBuildHashSkipTracking[self]\r
\r
# If library or Module is binary do not skip by hash\r
if self.IsBinaryModule:\r
+ GlobalData.gBuildHashSkipTracking[self] = False\r
return False\r
\r
# .inc is contains binary information so do not skip by hash as well\r
for f_ext in self.SourceFileList:\r
if '.inc' in str(f_ext):\r
+ GlobalData.gBuildHashSkipTracking[self] = False\r
return False\r
\r
- # Use Cache, if exists and if Module has a copy in cache\r
- if GlobalData.gBinCacheSource and self.AttemptModuleCacheCopy():\r
+ if not (self.MetaFile.Path, self.Arch) in gDict:\r
+ return False\r
+\r
+ if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:\r
+ GlobalData.gBuildHashSkipTracking[self] = True\r
return True\r
\r
- # Early exit for libraries that haven't yet finished building\r
- HashFile = path.join(self.BuildDir, self.Name + ".hash")\r
- if self.IsLibrary and not os.path.exists(HashFile):\r
- return False\r
+ if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
+ GlobalData.gBuildHashSkipTracking[self] = True\r
+ return True\r
\r
- # Return a Boolean based on if can skip by hash, either from memory or from IO.\r
- if self.Name not in GlobalData.gBuildHashSkipTracking[self.Arch]:\r
- # If hashes are the same, SaveFileOnChange() will return False.\r
- GlobalData.gBuildHashSkipTracking[self.Arch][self.Name] = not SaveFileOnChange(HashFile, self.GenModuleHash(), True)\r
- return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]\r
- else:\r
- return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]\r
+ return False\r
\r
## Decide whether we can skip the ModuleAutoGen process\r
# If any source file is newer than the module than we cannot skip\r
#\r
def CanSkip(self):\r
+ # Don't skip if cache feature enabled\r
+ if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:\r
+ return False\r
if self.MakeFileDir in GlobalData.gSikpAutoGenCache:\r
return True\r
if not os.path.exists(self.TimeStampPath):\r
#\r
def AddDependency(self, Dependency):\r
for Dep in Dependency:\r
- if not Dep.BuildObject.IsBinaryModule and not Dep.BuildObject.CanSkipbyHash():\r
+ if not Dep.BuildObject.IsBinaryModule and not Dep.BuildObject.CanSkipbyCache(GlobalData.gCacheIR):\r
self.DependencyList.append(BuildTask.New(Dep)) # BuildTask list\r
\r
## The thread wrapper of LaunchCommand function\r
self.AutoGenMgr = None\r
EdkLogger.info("")\r
os.chdir(self.WorkspaceDir)\r
- self.share_data = Manager().dict()\r
+ GlobalData.gCacheIR = Manager().dict()\r
self.log_q = log_q\r
def StartAutoGen(self,mqueue, DataPipe,SkipAutoGen,PcdMaList,share_data):\r
try:\r
feedback_q = mp.Queue()\r
file_lock = mp.Lock()\r
error_event = mp.Event()\r
+ GlobalData.file_lock = file_lock\r
+ FfsCmd = DataPipe.Get("FfsCommand")\r
+ if FfsCmd is None:\r
+ FfsCmd = {}\r
+ GlobalData.FfsCmd = FfsCmd\r
+ GlobalData.libConstPcd = DataPipe.Get("LibConstPcd")\r
+ GlobalData.Refes = DataPipe.Get("REFS")\r
auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,file_lock,share_data,self.log_q,error_event) for _ in range(self.ThreadNumber)]\r
self.AutoGenMgr = AutoGenManager(auto_workers,feedback_q,error_event)\r
self.AutoGenMgr.start()\r
w.start()\r
if PcdMaList is not None:\r
for PcdMa in PcdMaList:\r
+ if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:\r
+ PcdMa.GenModuleFilesHash(share_data)\r
+ PcdMa.GenPreMakefileHash(share_data)\r
+ if PcdMa.CanSkipbyPreMakefileCache(share_data):\r
+ continue\r
+\r
PcdMa.CreateCodeFile(False)\r
PcdMa.CreateMakeFile(False,GenFfsList = DataPipe.Get("FfsCommand").get((PcdMa.MetaFile.File, PcdMa.Arch),[]))\r
\r
+ if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:\r
+ PcdMa.GenMakeHeaderFilesHash(share_data)\r
+ PcdMa.GenMakeHash(share_data)\r
+ if PcdMa.CanSkipbyMakeCache(share_data):\r
+ continue\r
+\r
self.AutoGenMgr.join()\r
rt = self.AutoGenMgr.Status\r
return rt, 0\r
- except Exception as e:\r
- return False,e.errcode\r
+ except FatalError as e:\r
+ return False, e.args[0]\r
+ except:\r
+ return False, UNKNOWN_ERROR\r
\r
## Load configuration\r
#\r
mqueue.put(m)\r
\r
AutoGenObject.DataPipe.DataContainer = {"FfsCommand":FfsCommand}\r
+ AutoGenObject.DataPipe.DataContainer = {"CommandTarget": self.Target}\r
self.Progress.Start("Generating makefile and code")\r
data_pipe_file = os.path.join(AutoGenObject.BuildDir, "GlobalVar_%s_%s.bin" % (str(AutoGenObject.Guid),AutoGenObject.Arch))\r
AutoGenObject.DataPipe.dump(data_pipe_file)\r
- autogen_rt, errorcode = self.StartAutoGen(mqueue, AutoGenObject.DataPipe, self.SkipAutoGen, PcdMaList,self.share_data)\r
+ autogen_rt,errorcode = self.StartAutoGen(mqueue, AutoGenObject.DataPipe, self.SkipAutoGen, PcdMaList, GlobalData.gCacheIR)\r
self.Progress.Stop("done!")\r
if not autogen_rt:\r
self.AutoGenMgr.TerminateWorkers()\r
CmdListDict = None\r
if GlobalData.gEnableGenfdsMultiThread and self.Fdf:\r
CmdListDict = self._GenFfsCmd(Wa.ArchList)\r
+\r
+ # Add Platform and Package level hash in share_data for module hash calculation later\r
+ if GlobalData.gBinCacheSource or GlobalData.gBinCacheDest:\r
+ GlobalData.gCacheIR[('PlatformHash')] = GlobalData.gPlatformHash\r
+ for PkgName in GlobalData.gPackageHash.keys():\r
+ GlobalData.gCacheIR[(PkgName, 'PackageHash')] = GlobalData.gPackageHash[PkgName]\r
+ GlobalData.file_lock = mp.Lock()\r
+ GlobalData.FfsCmd = CmdListDict\r
+\r
self.Progress.Stop("done!")\r
MaList = []\r
ExitFlag = threading.Event()\r
AutoGenStart = time.time()\r
GlobalData.gGlobalDefines['ARCH'] = Arch\r
Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)\r
+ GlobalData.libConstPcd = Pa.DataPipe.Get("LibConstPcd")\r
+ GlobalData.Refes = Pa.DataPipe.Get("REFS")\r
for Module in Pa.Platform.Modules:\r
if self.ModuleFile.Dir == Module.Dir and self.ModuleFile.Name == Module.Name:\r
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)\r
if Ma is None:\r
continue\r
MaList.append(Ma)\r
- if Ma.CanSkipbyHash():\r
- self.HashSkipModules.append(Ma)\r
- if GlobalData.gBinCacheSource:\r
- EdkLogger.quiet("cache hit: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))\r
- continue\r
- else:\r
- if GlobalData.gBinCacheSource:\r
- EdkLogger.quiet("cache miss: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))\r
+\r
+ if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:\r
+ Ma.GenModuleFilesHash(GlobalData.gCacheIR)\r
+ Ma.GenPreMakefileHash(GlobalData.gCacheIR)\r
+ if Ma.CanSkipbyPreMakefileCache(GlobalData.gCacheIR):\r
+ self.HashSkipModules.append(Ma)\r
+ EdkLogger.quiet("cache hit: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))\r
+ continue\r
+\r
# Not to auto-gen for targets 'clean', 'cleanlib', 'cleanall', 'run', 'fds'\r
if self.Target not in ['clean', 'cleanlib', 'cleanall', 'run', 'fds']:\r
# for target which must generate AutoGen code and makefile\r
self.Progress.Stop("done!")\r
if self.Target == "genmake":\r
return True\r
+\r
+ if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:\r
+ Ma.GenMakeHeaderFilesHash(GlobalData.gCacheIR)\r
+ Ma.GenMakeHash(GlobalData.gCacheIR)\r
+ if Ma.CanSkipbyMakeCache(GlobalData.gCacheIR):\r
+ self.HashSkipModules.append(Ma)\r
+ EdkLogger.quiet("cache hit: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))\r
+ continue\r
+ else:\r
+ EdkLogger.quiet("cache miss: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))\r
+ Ma.PrintFirstMakeCacheMissFile(GlobalData.gCacheIR)\r
+\r
self.BuildModules.append(Ma)\r
# Initialize all modules in tracking to 'FAIL'\r
if Ma.Arch not in GlobalData.gModuleBuildTracking:\r
if GlobalData.gEnableGenfdsMultiThread and self.Fdf:\r
CmdListDict = self._GenFfsCmd(Wa.ArchList)\r
\r
+ # Add Platform and Package level hash in share_data for module hash calculation later\r
+ if GlobalData.gBinCacheSource or GlobalData.gBinCacheDest:\r
+ GlobalData.gCacheIR[('PlatformHash')] = GlobalData.gPlatformHash\r
+ for PkgName in GlobalData.gPackageHash.keys():\r
+ GlobalData.gCacheIR[(PkgName, 'PackageHash')] = GlobalData.gPackageHash[PkgName]\r
+\r
# multi-thread exit flag\r
ExitFlag = threading.Event()\r
ExitFlag.clear()\r
self.AutoGenTime += int(round((time.time() - WorkspaceAutoGenTime)))\r
self.BuildModules = []\r
+ TotalModules = []\r
for Arch in Wa.ArchList:\r
PcdMaList = []\r
AutoGenStart = time.time()\r
ModuleList.append(Inf)\r
Pa.DataPipe.DataContainer = {"FfsCommand":CmdListDict}\r
Pa.DataPipe.DataContainer = {"Workspace_timestamp": Wa._SrcTimeStamp}\r
+ Pa.DataPipe.DataContainer = {"CommandTarget": self.Target}\r
for Module in ModuleList:\r
# Get ModuleAutoGen object to generate C code file and makefile\r
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)\r
Ma.PlatformInfo = Pa\r
Ma.Workspace = Wa\r
PcdMaList.append(Ma)\r
- if Ma.CanSkipbyHash():\r
- self.HashSkipModules.append(Ma)\r
- if GlobalData.gBinCacheSource:\r
- EdkLogger.quiet("cache hit: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))\r
- continue\r
- else:\r
- if GlobalData.gBinCacheSource:\r
- EdkLogger.quiet("cache miss: %s[%s]" % (Ma.MetaFile.Path, Ma.Arch))\r
-\r
- # Not to auto-gen for targets 'clean', 'cleanlib', 'cleanall', 'run', 'fds'\r
- # for target which must generate AutoGen code and makefile\r
-\r
- self.BuildModules.append(Ma)\r
+ TotalModules.append(Ma)\r
# Initialize all modules in tracking to 'FAIL'\r
if Ma.Arch not in GlobalData.gModuleBuildTracking:\r
GlobalData.gModuleBuildTracking[Ma.Arch] = dict()\r
if Ma not in GlobalData.gModuleBuildTracking[Ma.Arch]:\r
GlobalData.gModuleBuildTracking[Ma.Arch][Ma] = 'FAIL'\r
+\r
mqueue = mp.Queue()\r
for m in Pa.GetAllModuleInfo:\r
mqueue.put(m)\r
data_pipe_file = os.path.join(Pa.BuildDir, "GlobalVar_%s_%s.bin" % (str(Pa.Guid),Pa.Arch))\r
Pa.DataPipe.dump(data_pipe_file)\r
- autogen_rt, errorcode = self.StartAutoGen(mqueue, Pa.DataPipe, self.SkipAutoGen, PcdMaList,self.share_data)\r
+ autogen_rt, errorcode = self.StartAutoGen(mqueue, Pa.DataPipe, self.SkipAutoGen, PcdMaList, GlobalData.gCacheIR)\r
+\r
+ # Skip cache hit modules\r
+ if GlobalData.gBinCacheSource:\r
+ for Ma in TotalModules:\r
+ if (Ma.MetaFile.Path, Ma.Arch) in GlobalData.gCacheIR and \\r
+ GlobalData.gCacheIR[(Ma.MetaFile.Path, Ma.Arch)].PreMakeCacheHit:\r
+ self.HashSkipModules.append(Ma)\r
+ continue\r
+ if (Ma.MetaFile.Path, Ma.Arch) in GlobalData.gCacheIR and \\r
+ GlobalData.gCacheIR[(Ma.MetaFile.Path, Ma.Arch)].MakeCacheHit:\r
+ self.HashSkipModules.append(Ma)\r
+ continue\r
+ self.BuildModules.append(Ma)\r
+ else:\r
+ self.BuildModules.extend(TotalModules)\r
\r
if not autogen_rt:\r
self.AutoGenMgr.TerminateWorkers()\r
raise FatalError(errorcode)\r
self.AutoGenTime += int(round((time.time() - AutoGenStart)))\r
self.Progress.Stop("done!")\r
+\r
+ if GlobalData.gBinCacheSource:\r
+ EdkLogger.quiet("Total cache hit driver num: %s, cache miss driver num: %s" % (len(set(self.HashSkipModules)), len(set(self.BuildModules))))\r
+ CacheHitMa = set()\r
+ CacheNotHitMa = set()\r
+ for IR in GlobalData.gCacheIR.keys():\r
+ if 'PlatformHash' in IR or 'PackageHash' in IR:\r
+ continue\r
+ if GlobalData.gCacheIR[IR].PreMakeCacheHit or GlobalData.gCacheIR[IR].MakeCacheHit:\r
+ CacheHitMa.add(IR)\r
+ else:\r
+ # There might be binary module or module which has .inc files, not count for cache miss\r
+ CacheNotHitMa.add(IR)\r
+ EdkLogger.quiet("Total module num: %s, cache hit module num: %s" % (len(CacheHitMa)+len(CacheNotHitMa), len(CacheHitMa)))\r
+\r
for Arch in Wa.ArchList:\r
MakeStart = time.time()\r
- for Ma in self.BuildModules:\r
+ for Ma in set(self.BuildModules):\r
# Generate build task for the module\r
if not Ma.IsBinaryModule:\r
Bt = BuildTask.New(ModuleMakeUnit(Ma, Pa.BuildCommand,self.Target))\r