X-Git-Url: https://git.proxmox.com/?a=blobdiff_plain;f=BaseTools%2FSource%2FPython%2FAutoGen%2FModuleAutoGen.py;h=d05410b32966bfbeaf3e514f1a68ea72be90e873;hb=c7c25997595aa34ce0a7a21ca2e1fc5b0f9b38a6;hp=076ce0e39c37cc4661d4aa27dfe07ae751842925;hpb=76e12fa33416f3133c41c6e396a69abbe32f2edb;p=mirror_edk2.git

diff --git a/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py b/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
old mode 100644
new mode 100755
index 076ce0e39c..d05410b329
--- a/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
@@ -6,7 +6,7 @@
 #
 from __future__ import absolute_import
 from AutoGen.AutoGen import AutoGen
-from Common.LongFilePathSupport import CopyLongFilePath
+from Common.LongFilePathSupport import LongFilePath, CopyLongFilePath
 from Common.BuildToolError import *
 from Common.DataType import *
 from Common.Misc import *
@@ -26,11 +26,13 @@ from Workspace.MetaFileCommentParser import UsageList
 from .GenPcdDb import CreatePcdDatabaseCode
 from Common.caching import cached_class_function
 from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo
+import json
+import tempfile
 
 ## Mapping Makefile type
 gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
 
 #
-# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
+# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC
 # is the former use /I , the Latter used -I to specify include directories
 #
 gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
@@ -252,6 +254,7 @@ class ModuleAutoGen(AutoGen):
         self.AutoGenDepSet = set()
         self.ReferenceModules = []
         self.ConstPcd = {}
+        self.FileDependCache = {}
 
     def __init_platform_info__(self):
         pinfo = self.DataPipe.Get("P_Info")
@@ -266,7 +269,7 @@ class ModuleAutoGen(AutoGen):
     #
     @cached_class_function
     def __hash__(self):
-        return hash((self.MetaFile, self.Arch))
+        return hash((self.MetaFile, self.Arch, self.ToolChain, self.BuildTarget))
 
     def __repr__(self):
         return "%s [%s]" % (self.MetaFile, self.Arch)
@@ -457,14 +460,31 @@ class ModuleAutoGen(AutoGen):
     def BuildCommand(self):
         return self.PlatformInfo.BuildCommand
 
-    ## Get object list of all packages the module and its dependent libraries belong to
+    ## Get the packages of the module and the packages of the platform
+    #
+    #   @retval     list    The list of package objects
+    #
+    @cached_property
+    def PackageList(self):
+        PkgList = []
+        if self.Module.Packages:
+            PkgList.extend(self.Module.Packages)
+        Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
+        for Package in Platform.Packages:
+            if Package in PkgList:
+                continue
+            PkgList.append(Package)
+        return PkgList
+
+    ## Get object list of all packages the module and its dependent libraries belong to, plus those the platform depends on
     #
     #   @retval     list    The list of package object
     #
     @cached_property
     def DerivedPackageList(self):
         PackageList = []
-        for M in [self.Module] + self.DependentLibraryList:
+        PackageList.extend(self.PackageList)
+        for M in self.DependentLibraryList:
             for Package in M.Packages:
                 if Package in PackageList:
                     continue
                 PackageList.append(Package)
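The new PackageList property merges the packages declared by the module with those declared by the platform, keeping first-seen order and dropping duplicates. A minimal, self-contained sketch of that dedup idiom (names here are illustrative, not edk2 API):

    def merge_packages(module_pkgs, platform_pkgs):
        # Module packages first, then any platform package not already present.
        merged = list(module_pkgs)
        for pkg in platform_pkgs:
            if pkg not in merged:
                merged.append(pkg)
        return merged

    # merge_packages(["MdePkg", "MdeModulePkg"], ["MdePkg", "CryptoPkg"])
    # -> ['MdePkg', 'MdeModulePkg', 'CryptoPkg']

The widened __hash__ above follows the same motivation: with ToolChain and BuildTarget added to the tuple, two ModuleAutoGen objects for the same INF built with different toolchains or targets no longer collide in hash-keyed containers.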
@@ -664,12 +684,12 @@ class ModuleAutoGen(AutoGen):
     @cached_property
     def BuildOptionIncPathList(self):
         #
-        # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
+        # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC
         # is the former use /I , the Latter used -I to specify include directories
         #
         if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):
             BuildOptIncludeRegEx = gBuildOptIncludePatternMsft
-        elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
+        elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC'):
             BuildOptIncludeRegEx = gBuildOptIncludePatternOther
         else:
             #
@@ -684,16 +704,7 @@ class ModuleAutoGen(AutoGen):
             except KeyError:
                 FlagOption = ''
 
-            if self.ToolChainFamily != 'RVCT':
-                IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
-            else:
-                #
-                # RVCT may specify a list of directory seperated by commas
-                #
-                IncPathList = []
-                for Path in BuildOptIncludeRegEx.findall(FlagOption):
-                    PathList = GetSplitList(Path, TAB_COMMA_SPLIT)
-                    IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)
+            IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
 
             #
             # EDK II modules must not reference header files outside of the packages they depend on or
@@ -839,7 +850,8 @@ class ModuleAutoGen(AutoGen):
                 SubDirectory = os.path.join(self.OutputDir, File.SubDir)
                 if not os.path.exists(SubDirectory):
                     CreateDirectory(SubDirectory)
-        LastTarget = None
+        TargetList = set()
+        FinalTargetName = set()
         RuleChain = set()
         SourceList = [File]
         Index = 0
@@ -849,6 +861,9 @@ class ModuleAutoGen(AutoGen):
         self.BuildOption
 
         while Index < len(SourceList):
+            # Reset the FileType if not the first iteration.
+            if Index > 0:
+                FileType = TAB_UNKNOWN_FILE
             Source = SourceList[Index]
             Index = Index + 1
 
@@ -865,29 +880,25 @@ class ModuleAutoGen(AutoGen):
             elif Source.Ext in self.BuildRules:
                 RuleObject = self.BuildRules[Source.Ext]
             else:
-                # stop at no more rules
-                if LastTarget:
-                    self._FinalBuildTargetList.add(LastTarget)
-                break
+                # No more rule to apply: Source is a final target.
+                FinalTargetName.add(Source)
+                continue
 
             FileType = RuleObject.SourceFileType
             self._FileTypes[FileType].add(Source)
 
             # stop at STATIC_LIBRARY for library
             if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
-                if LastTarget:
-                    self._FinalBuildTargetList.add(LastTarget)
-                break
+                FinalTargetName.add(Source)
+                continue
 
             Target = RuleObject.Apply(Source, self.BuildRuleOrder)
             if not Target:
-                if LastTarget:
-                    self._FinalBuildTargetList.add(LastTarget)
-                break
-            elif not Target.Outputs:
-                # Only do build for target with outputs
-                self._FinalBuildTargetList.add(Target)
+                # No Target: Source is a final target.
+                FinalTargetName.add(Source)
+                continue
 
+            TargetList.add(Target)
             self._BuildTargets[FileType].add(Target)
 
             if not Source.IsBinary and Source == File:
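The reworked _ApplyBuildRule loop records every produced target in TargetList and every source that no further rule consumes in FinalTargetName, then resolves the names back to target objects after the loop. A condensed model of that worklist algorithm (apply_rule is a stand-in for RuleObject.Apply):

    def resolve_final_targets(first_source, apply_rule):
        # apply_rule(src) -> (target_name, outputs) or None when no rule matches.
        targets = []
        final_names = set()
        worklist = [first_source]
        i = 0
        while i < len(worklist):
            src = worklist[i]
            i += 1
            result = apply_rule(src)
            if result is None:
                final_names.add(src)          # no more rules: src is final
                continue
            targets.append(result)
            worklist.extend(result[1])        # chain outputs into further rules
        return [t for t in targets if t[0] in final_names]

Unlike the old LastTarget logic, a source with no applicable rule no longer aborts the whole loop with break; it is simply marked final and the remaining worklist entries are still processed.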
@@ -895,12 +906,16 @@ class ModuleAutoGen(AutoGen):
 
             # to avoid cyclic rule
             if FileType in RuleChain:
-                break
+                EdkLogger.error("build", ERROR_STATEMENT, "Cyclic dependency detected while generating rule for %s" % str(Source))
 
             RuleChain.add(FileType)
             SourceList.extend(Target.Outputs)
-            LastTarget = Target
-            FileType = TAB_UNKNOWN_FILE
+
+        # For each final target name, retrieve the corresponding TargetDescBlock instance.
+        for FTargetName in FinalTargetName:
+            for Target in TargetList:
+                if FTargetName == Target.Target:
+                    self._FinalBuildTargetList.add(Target)
 
     @cached_property
     def Targets(self):
@@ -933,13 +948,13 @@ class ModuleAutoGen(AutoGen):
         self.Targets
         return self._FileTypes
 
-    ## Get the list of package object the module depends on
+    ## Get the list of package objects the module and the platform depend on
     #
     #   @retval     list The package object list
     #
     @cached_property
     def DependentPackageList(self):
-        return self.Module.Packages
+        return self.PackageList
 
     ## Return the list of auto-generated code file
     #
@@ -1007,7 +1022,7 @@ class ModuleAutoGen(AutoGen):
     @cached_property
     def ModulePcdList(self):
         # apply PCD settings from platform
-        RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
+        RetVal = self.PlatformInfo.ApplyPcdSetting(self, self.Module.Pcds)
         return RetVal
 
     @cached_property
@@ -1038,7 +1053,7 @@ class ModuleAutoGen(AutoGen):
                 continue
             Pcds.add(Key)
             PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
-        RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))
+        RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self, PcdsInLibrary, Library=Library))
         return RetVal
 
     ## Get the GUID value mapping
@@ -1096,7 +1111,7 @@ class ModuleAutoGen(AutoGen):
         RetVal.append(self.MetaFile.Dir)
         RetVal.append(self.DebugDir)
 
-        for Package in self.Module.Packages:
+        for Package in self.PackageList:
             PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
             if PackageDir not in RetVal:
                 RetVal.append(PackageDir)
@@ -1107,8 +1122,31 @@ class ModuleAutoGen(AutoGen):
         for Inc in IncludesList:
             if Inc not in RetVal:
                 RetVal.append(str(Inc))
+        RetVal.extend(self.IncPathFromBuildOptions)
         return RetVal
 
+    @cached_property
+    def IncPathFromBuildOptions(self):
+        IncPathList = []
+        for tool in self.BuildOption:
+            if 'FLAGS' in self.BuildOption[tool]:
+                flags = self.BuildOption[tool]['FLAGS']
+                whitespace = False
+                for flag in flags.split(" "):
+                    flag = flag.strip()
+                    if flag.startswith(("/I", "-I")):
+                        if len(flag) > 2:
+                            if os.path.exists(flag[2:]):
+                                IncPathList.append(flag[2:])
+                        else:
+                            whitespace = True
+                            continue
+                    if whitespace and flag:
+                        if os.path.exists(flag):
+                            IncPathList.append(flag)
+                            whitespace = False
+        return IncPathList
+
     @cached_property
     def IncludePathLength(self):
         return sum(len(inc)+1 for inc in self.IncludePathList)
@@ -1120,7 +1158,7 @@ class ModuleAutoGen(AutoGen):
     @cached_property
     def PackageIncludePathList(self):
         IncludesList = []
-        for Package in self.Module.Packages:
+        for Package in self.PackageList:
             PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
             IncludesList = Package.Includes
             if Package._PrivateIncludes:
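IncPathFromBuildOptions tolerates both the attached form (/Ipath, -Ipath) and the detached form (-I path) of include flags. A compact sketch of the same two-state scan, without the os.path.exists filtering (hypothetical helper):

    def parse_include_flags(flags):
        paths, expect_path = [], False
        for token in flags.split():
            if expect_path:
                paths.append(token)           # argument of a detached /I or -I
                expect_path = False
            elif token.startswith(("/I", "-I")):
                if len(token) > 2:
                    paths.append(token[2:])   # attached form: -Iinclude
                else:
                    expect_path = True        # detached form: -I include
        return paths

    # parse_include_flags("/Iinc1 -I inc2 -O2") -> ['inc1', 'inc2']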
@@ -1258,25 +1296,22 @@ class ModuleAutoGen(AutoGen):
         fStringIO.close ()
         fInputfile.close ()
         return OutputName
+
     @cached_property
     def OutputFile(self):
         retVal = set()
-        OutputDir = self.OutputDir.replace('\\', '/').strip('/')
-        DebugDir = self.DebugDir.replace('\\', '/').strip('/')
-        for Item in self.CodaTargetList:
-            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
-            retVal.add(File)
-        if self.DepexGenerated:
-            retVal.add(self.Name + '.depex')
 
-        Bin = self._GenOffsetBin()
-        if Bin:
-            retVal.add(Bin)
+        for Root, Dirs, Files in os.walk(self.BuildDir):
+            for File in Files:
+                # lib file is already added through above CodaTargetList, skip it here
+                if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):
+                    NewFile = path.join(Root, File)
+                    retVal.add(NewFile)
 
-        for Root, Dirs, Files in os.walk(OutputDir):
+        for Root, Dirs, Files in os.walk(self.FfsOutputDir):
             for File in Files:
-                if File.lower().endswith('.pdb'):
-                    retVal.add(File)
+                NewFile = path.join(Root, File)
+                retVal.add(NewFile)
 
         return retVal
 
@@ -1608,62 +1643,88 @@ class ModuleAutoGen(AutoGen):
 
         self.IsAsBuiltInfCreated = True
 
+    def CacheCopyFile(self, DestDir, SourceDir, File):
+        if os.path.isdir(File):
+            return
+
+        sub_dir = os.path.relpath(File, SourceDir)
+        destination_file = os.path.join(DestDir, sub_dir)
+        destination_dir = os.path.dirname(destination_file)
+        CreateDirectory(destination_dir)
+        try:
+            CopyFileOnChange(File, destination_dir)
+        except:
+            EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))
+            return
+
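CacheCopyFile re-roots a file from one tree into another by taking its path relative to the source root and joining it under the destination root. In isolation the path arithmetic looks like this (standard library only; shutil.copy2 stands in for edk2's CopyFileOnChange):

    import os, shutil

    def mirror_file(dest_root, src_root, file_path):
        rel = os.path.relpath(file_path, src_root)      # e.g. 'OUTPUT/foo.efi'
        dest = os.path.join(dest_root, rel)
        os.makedirs(os.path.dirname(dest), exist_ok=True)
        shutil.copy2(file_path, dest)
        return dest

    # mirror_file('/cache/Mod', '/build/Mod', '/build/Mod/OUTPUT/foo.efi')
    # -> '/cache/Mod/OUTPUT/foo.efi'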
     def CopyModuleToCache(self):
-        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.Name, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
-        CreateDirectory (FileDir)
-        HashFile = path.join(self.BuildDir, self.Name + '.hash')
-        if os.path.exists(HashFile):
-            CopyFileOnChange(HashFile, FileDir)
-        ModuleFile = path.join(self.OutputDir, self.Name + '.inf')
-        if os.path.exists(ModuleFile):
-            CopyFileOnChange(ModuleFile, FileDir)
+        # Find the MakeHashStr and PreMakeHashStr from the latest MakeHashFileList
+        # and PreMakeHashFileList files
+        MakeHashStr = None
+        PreMakeHashStr = None
+        MakeTimeStamp = 0
+        PreMakeTimeStamp = 0
+        Files = [f for f in os.listdir(LongFilePath(self.BuildDir)) if path.isfile(LongFilePath(path.join(self.BuildDir, f)))]
+        for File in Files:
+            if ".MakeHashFileList." in File:
+                # find the latest file by timestamp
+                FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
+                if FileTimeStamp > MakeTimeStamp:
+                    MakeTimeStamp = FileTimeStamp
+                    MakeHashStr = File.split('.')[-1]
+                    if len(MakeHashStr) != 32:
+                        EdkLogger.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File))
+            if ".PreMakeHashFileList." in File:
+                FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
+                if FileTimeStamp > PreMakeTimeStamp:
+                    PreMakeTimeStamp = FileTimeStamp
+                    PreMakeHashStr = File.split('.')[-1]
+                    if len(PreMakeHashStr) != 32:
+                        EdkLogger.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File))
+
+        if not MakeHashStr:
+            EdkLogger.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
+            return
+        if not PreMakeHashStr:
+            EdkLogger.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
+            return
+
+        # Create Cache destination dirs
+        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
+        FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
+        CacheFileDir = path.join(FileDir, MakeHashStr)
+        CacheFfsDir = path.join(FfsDir, MakeHashStr)
+        CreateDirectory (CacheFileDir)
+        CreateDirectory (CacheFfsDir)
+
+        # Create a ModuleHashPair file so that multiple cached versions can coexist
+        ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
+        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
+        if os.path.exists(ModuleHashPair):
+            with open(ModuleHashPair, 'r') as f:
+                ModuleHashPairList = json.load(f)
+        if (PreMakeHashStr, MakeHashStr) not in set(map(tuple, ModuleHashPairList)):
+            ModuleHashPairList.insert(0, (PreMakeHashStr, MakeHashStr))
+            with open(ModuleHashPair, 'w') as f:
+                json.dump(ModuleHashPairList, f, indent=2)
+
+        # Copy files to Cache destination dirs
         if not self.OutputFile:
             Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
             self.OutputFile = Ma.Binaries
         for File in self.OutputFile:
-            File = str(File)
-            if not os.path.isabs(File):
-                File = os.path.join(self.OutputDir, File)
-            if os.path.exists(File):
-                sub_dir = os.path.relpath(File, self.OutputDir)
-                destination_file = os.path.join(FileDir, sub_dir)
-                destination_dir = os.path.dirname(destination_file)
-                CreateDirectory(destination_dir)
-                CopyFileOnChange(File, destination_dir)
-
-    def AttemptModuleCacheCopy(self):
-        # If library or Module is binary do not skip by hash
-        if self.IsBinaryModule:
-            return False
-        # .inc is contains binary information so do not skip by hash as well
-        for f_ext in self.SourceFileList:
-            if '.inc' in str(f_ext):
-                return False
-        FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.Name, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
-        HashFile = path.join(FileDir, self.Name + '.hash')
-        if os.path.exists(HashFile):
-            f = open(HashFile, 'r')
-            CacheHash = f.read()
-            f.close()
-            self.GenModuleHash()
-            if GlobalData.gModuleHash[self.Arch][self.Name]:
-                if CacheHash == GlobalData.gModuleHash[self.Arch][self.Name]:
-                    for root, dir, files in os.walk(FileDir):
-                        for f in files:
-                            if self.Name + '.hash' in f:
-                                CopyFileOnChange(HashFile, self.BuildDir)
-                            else:
-                                File = path.join(root, f)
-                                sub_dir = os.path.relpath(File, FileDir)
-                                destination_file = os.path.join(self.OutputDir, sub_dir)
-                                destination_dir = os.path.dirname(destination_file)
-                                CreateDirectory(destination_dir)
-                                CopyFileOnChange(File, destination_dir)
-                    if self.Name == "PcdPeim" or self.Name == "PcdDxe":
-                        CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
-                    return True
-        return False
-
+            if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):
+                self.CacheCopyFile(CacheFfsDir, self.FfsOutputDir, File)
+            else:
+                if self.Name + ".autogen.hash." in File or \
+                    self.Name + ".autogen.hashchain." in File or \
+                    self.Name + ".hash." in File or \
+                    self.Name + ".hashchain." in File or \
+                    self.Name + ".PreMakeHashFileList." in File or \
+                    self.Name + ".MakeHashFileList." in File:
+                    self.CacheCopyFile(FileDir, self.BuildDir, File)
+                else:
+                    self.CacheCopyFile(CacheFileDir, self.BuildDir, File)
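CopyModuleToCache names each cache entry after its MakeHash and records the (PreMakeHash, MakeHash) pairing in a JSON .ModuleHashPair file, so several versions of one module can coexist under the same cache root. A toy version of that bookkeeping (file layout is illustrative):

    import json, os

    def record_pair(pair_file, premake_hash, make_hash):
        pairs = []
        if os.path.exists(pair_file):
            with open(pair_file) as f:
                pairs = [tuple(p) for p in json.load(f)]
        if (premake_hash, make_hash) not in pairs:
            pairs.insert(0, (premake_hash, make_hash))   # newest first
            with open(pair_file, 'w') as f:
                json.dump(pairs, f, indent=2)

Readers later walk the list front to back, so the most recent build is tried first.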
     ## Create makefile for the module and its dependent libraries
     #
     #   @param      CreateLibraryMakeFile   Flag indicating if or not the makefiles of
@@ -1671,6 +1732,7 @@ class ModuleAutoGen(AutoGen):
     #
     @cached_class_function
     def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):
+        # nest this function inside its only caller.
         def CreateTimeStamp():
             FileSet = {self.MetaFile.Path}
 
@@ -1686,10 +1748,8 @@ class ModuleAutoGen(AutoGen):
 
             if os.path.exists (self.TimeStampPath):
                 os.remove (self.TimeStampPath)
-            with open(self.TimeStampPath, 'w+') as fd:
-                for f in FileSet:
-                    fd.write(f)
-                    fd.write("\n")
+
+            SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)
 
         # Ignore generating makefile when it is a binary module
         if self.IsBinaryModule:
@@ -1701,8 +1761,8 @@ class ModuleAutoGen(AutoGen):
             for LibraryAutoGen in self.LibraryAutoGenList:
                 LibraryAutoGen.CreateMakeFile()
 
-        # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping
-        if not GlobalData.gUseHashCache and self.CanSkip():
+        # CanSkip uses timestamps to determine build skipping
+        if self.CanSkip():
             return
 
         if len(self.CustomMakefile) == 0:
@@ -1718,6 +1778,12 @@ class ModuleAutoGen(AutoGen):
 
         CreateTimeStamp()
 
+        MakefileType = Makefile._FileType
+        MakefileName = Makefile._FILE_NAME_[MakefileType]
+        MakefilePath = os.path.join(self.MakeFileDir, MakefileName)
+        FilePath = path.join(self.BuildDir, self.Name + ".makefile")
+        SaveFileOnChange(FilePath, MakefilePath, False)
+
     def CopyBinaryFiles(self):
         for File in self.Module.Binaries:
             SrcPath = File.Path
@@ -1729,6 +1795,7 @@ class ModuleAutoGen(AutoGen):
     #                                       dependent libraries will be created
     #
     def CreateCodeFile(self, CreateLibraryCodeFile=True):
+
         if self.IsCodeFileCreated:
             return
 
@@ -1744,10 +1811,8 @@ class ModuleAutoGen(AutoGen):
         if not self.IsLibrary and CreateLibraryCodeFile:
             for LibraryAutoGen in self.LibraryAutoGenList:
                 LibraryAutoGen.CreateCodeFile()
-        # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping
-        if not GlobalData.gUseHashCache and self.CanSkip():
-            return
 
+        self.LibraryAutoGenList
         AutoGenList = []
         IgoredAutoGenList = []
 
@@ -1785,6 +1850,7 @@ class ModuleAutoGen(AutoGen):
                             (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))
 
         self.IsCodeFileCreated = True
+
         return AutoGenList
 
     ## Summarize the ModuleAutoGen objects of all libraries used by this module
@@ -1808,92 +1874,550 @@ class ModuleAutoGen(AutoGen):
                 self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
         return RetVal
 
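CreateMakeFile now leaves a small Name.makefile pointer file in BuildDir whose only content is the path of the generated makefile; GenCMakeHash later reads it back to locate the makefile. The write-only-on-change behavior relied on here is SaveFileOnChange, edk2's helper; the following is a sketch of its assumed semantics, not its actual implementation:

    import os

    def save_file_on_change(path, content):
        # Skip the write when content is unchanged so timestamps stay stable
        # for incremental builds.
        if os.path.exists(path):
            with open(path) as f:
                if f.read() == content:
                    return False
        with open(path, 'w') as f:
            f.write(content)
        return True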
+    def GenCMakeHash(self):
+        # GenCMakeHash is only called for --binary-destination.
+        # It is never called in multiprocessing and always saves its result directly in
+        # the main process, so no remote dict is needed to share the gCMakeHashFile result.
+
+        DependencyFileSet = set()
+        # Add AutoGen files
+        if self.AutoGenFileList:
+            for File in set(self.AutoGenFileList):
+                DependencyFileSet.add(File)
+
+        # Add Makefile
+        abspath = path.join(self.BuildDir, self.Name + ".makefile")
+        lines = []
+        try:
+            with open(LongFilePath(abspath), "r") as fd:
+                lines = fd.readlines()
+        except Exception as e:
+            EdkLogger.error("build", FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
+        if lines:
+            DependencyFileSet.update(lines)
+
+        # Calculate the hash of all the dependency files above
+        # Initialize hash object
+        FileList = []
+        m = hashlib.md5()
+        for File in sorted(DependencyFileSet, key=lambda x: str(x)):
+            if not path.exists(LongFilePath(str(File))):
+                EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
+                continue
+            with open(LongFilePath(str(File)), 'rb') as f:
+                Content = f.read()
+            m.update(Content)
+            FileList.append((str(File), hashlib.md5(Content).hexdigest()))
+
+        HashChainFile = path.join(self.BuildDir, self.Name + ".autogen.hashchain." + m.hexdigest())
+        GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
+        try:
+            with open(LongFilePath(HashChainFile), 'w') as f:
+                json.dump(FileList, f, indent=2)
+        except:
+            EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
+            return False
+
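Every .hashchain. file pairs a dependency path with its md5, and the digest of all dependency contents is baked into the file's own name, so the name alone acts as a cache key. A condensed model of the writer (hypothetical helper; md5 chosen to match the tool):

    import hashlib, json

    def write_hashchain(prefix, dep_files):
        chain, total = [], hashlib.md5()
        for p in sorted(dep_files):
            with open(p, 'rb') as f:
                data = f.read()
            total.update(data)
            chain.append((p, hashlib.md5(data).hexdigest()))
        name = "%s.hashchain.%s" % (prefix, total.hexdigest())
        with open(name, 'w') as f:
            json.dump(chain, f, indent=2)
        return name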
     def GenModuleHash(self):
-        # Initialize a dictionary for each arch type
-        if self.Arch not in GlobalData.gModuleHash:
-            GlobalData.gModuleHash[self.Arch] = {}
+        # GenModuleHash is only called after the autogen phase.
+        # It is never called in multiprocessing and always saves its result directly in
+        # the main process, so no remote dict is needed to share the gModuleHashFile result.
+        #
+        # GenModuleHash consumes no dicts.
+        # GenModuleHash produces the local gModuleHashFile dict.
+
+        DependencyFileSet = set()
+        # Add Module Meta file
+        DependencyFileSet.add(self.MetaFile.Path)
+
+        # Add Module's source files
+        if self.SourceFileList:
+            for File in set(self.SourceFileList):
+                DependencyFileSet.add(File.Path)
+
+        # Add the module's include header files,
+        # directly using the deps.txt file in the module BuildDir
+        abspath = path.join(self.BuildDir, "deps.txt")
+        rt = None
+        try:
+            with open(LongFilePath(abspath), "r") as fd:
+                lines = fd.readlines()
+                if lines:
+                    rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])
+        except Exception as e:
+            EdkLogger.error("build", FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
 
-        # Early exit if module or library has been hashed and is in memory
-        if self.Name in GlobalData.gModuleHash[self.Arch]:
-            return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
+        if rt:
+            DependencyFileSet.update(rt)
 
+        # Calculate the hash of all the dependency files above
         # Initialze hash object
+        FileList = []
         m = hashlib.md5()
+        BuildDirStr = path.abspath(self.BuildDir).lower()
+        for File in sorted(DependencyFileSet, key=lambda x: str(x)):
+            # Skip the AutoGen files in BuildDir which have already been
+            # included in the .autogen.hash. file
+            if BuildDirStr in path.abspath(File).lower():
+                continue
+            if not path.exists(LongFilePath(File)):
+                EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
+                continue
+            with open(LongFilePath(File), 'rb') as f:
+                Content = f.read()
+            m.update(Content)
+            FileList.append((File, hashlib.md5(Content).hexdigest()))
+
+        HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())
+        GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
+        try:
+            with open(LongFilePath(HashChainFile), 'w') as f:
+                json.dump(FileList, f, indent=2)
+        except:
+            EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
+            return False
+
+    def GenPreMakefileHashList(self):
+        # GenPreMakefileHashList consumes the dicts below:
+        #     gPlatformHashFile
+        #     gPackageHashFile
+        #     gModuleHashFile
+        # GenPreMakefileHashList produces no dict.
+        # gModuleHashFile items might be produced in multiprocessing, so the
+        # gModuleHashFile remote dict needs to be checked.
+
+        # skip binary module
+        if self.IsBinaryModule:
+            return
+
+        FileList = []
+        m = hashlib.md5()
         # Add Platform level hash
-        m.update(GlobalData.gPlatformHash.encode('utf-8'))
+        HashFile = GlobalData.gPlatformHashFile
+        if path.exists(LongFilePath(HashFile)):
+            FileList.append(HashFile)
+            m.update(HashFile.encode('utf-8'))
+        else:
+            EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)
 
         # Add Package level hash
         if self.DependentPackageList:
             for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
-                if Pkg.PackageName in GlobalData.gPackageHash:
-                    m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))
+                if (Pkg.PackageName, Pkg.Arch) not in GlobalData.gPackageHashFile:
+                    EdkLogger.quiet("[cache warning]: No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))
+                    continue
+                HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]
+                if path.exists(LongFilePath(HashFile)):
+                    FileList.append(HashFile)
+                    m.update(HashFile.encode('utf-8'))
+                else:
+                    EdkLogger.quiet("[cache warning]: No Package HashFile: %s" % HashFile)
+
+        # Add Module self
+        # GenPreMakefileHashList is needed in both --binary-destination
+        # and --hash, and --hash might save the ModuleHashFile in the remote dict
+        # during multiprocessing.
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
+            HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
+        else:
+            EdkLogger.quiet("[cache error]: No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
+        if path.exists(LongFilePath(HashFile)):
+            FileList.append(HashFile)
+            m.update(HashFile.encode('utf-8'))
+        else:
+            EdkLogger.quiet("[cache warning]: No Module HashFile: %s" % HashFile)
 
         # Add Library hash
         if self.LibraryAutoGenList:
-            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
-                if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
-                    Lib.GenModuleHash()
-                m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
+            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
+
+                if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
+                    HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
+                else:
+                    EdkLogger.quiet("[cache error]: No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
+                if path.exists(LongFilePath(HashFile)):
+                    FileList.append(HashFile)
+                    m.update(HashFile.encode('utf-8'))
+                else:
+                    EdkLogger.quiet("[cache warning]: No Lib HashFile: %s" % HashFile)
+
+        # Save PreMakeHashFileList
+        FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())
+        try:
+            with open(LongFilePath(FilePath), 'w') as f:
+                json.dump(FileList, f, indent=0)
+        except:
+            EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)
+
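Note that GenPreMakefileHashList hashes the hash-file paths, not their contents: because every referenced hash file already carries its own content digest in its name, hashing the names is enough to detect any upstream change. Schematically (illustrative only):

    import hashlib

    def aggregate(hash_file_paths):
        m = hashlib.md5()
        for p in hash_file_paths:          # platform, packages, module, libs
            m.update(p.encode('utf-8'))    # each name embeds that file's digest
        return m.hexdigest()               # becomes .PreMakeHashFileList.<hex>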
+    def GenMakefileHashList(self):
+        # GenMakefileHashList is only needed for --binary-destination, which keeps
+        # everything in the local dict, so there is no need to check the remote dict.
+
+        # skip binary module
+        if self.IsBinaryModule:
+            return
+
+        FileList = []
+        m = hashlib.md5()
+        # Add AutoGen hash
+        HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]
+        if path.exists(LongFilePath(HashFile)):
+            FileList.append(HashFile)
+            m.update(HashFile.encode('utf-8'))
+        else:
+            EdkLogger.quiet("[cache warning]: No AutoGen HashFile: %s" % HashFile)
 
         # Add Module self
-        f = open(str(self.MetaFile), 'rb')
-        Content = f.read()
-        f.close()
-        m.update(Content)
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
+            HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
+        else:
+            EdkLogger.quiet("[cache error]: No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
+        if path.exists(LongFilePath(HashFile)):
+            FileList.append(HashFile)
+            m.update(HashFile.encode('utf-8'))
+        else:
+            EdkLogger.quiet("[cache warning]: No Module HashFile: %s" % HashFile)
 
-        # Add Module's source files
-        if self.SourceFileList:
-            for File in sorted(self.SourceFileList, key=lambda x: str(x)):
-                f = open(str(File), 'rb')
-                Content = f.read()
-                f.close()
-                m.update(Content)
+        # Add Library hash
+        if self.LibraryAutoGenList:
+            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
+                if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
+                    HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
+                else:
+                    EdkLogger.quiet("[cache error]: No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
+                if path.exists(LongFilePath(HashFile)):
+                    FileList.append(HashFile)
+                    m.update(HashFile.encode('utf-8'))
+                else:
+                    EdkLogger.quiet("[cache warning]: No Lib HashFile: %s" % HashFile)
 
-        GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()
+        # Save MakeHashFileList
+        FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())
+        try:
+            with open(LongFilePath(FilePath), 'w') as f:
+                json.dump(FileList, f, indent=0)
+        except:
+            EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)
+
+    def CheckHashChainFile(self, HashChainFile):
+        # Assume the HashChainFile basename format is 'x.hashchain.16BytesHexStr', where
+        # x is the module name and 16BytesHexStr is the md5 hexdigest of
+        # all the chained files' content
+        HashStr = HashChainFile.split('.')[-1]
+        if len(HashStr) != 32:
+            EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % (HashChainFile))
+            return False
 
-        return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
+        try:
+            with open(LongFilePath(HashChainFile), 'r') as f:
+                HashChainList = json.load(f)
+        except:
+            EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)
+            return False
 
+        # Print the different file info
+        # print(HashChainFile)
+        for idx, (SrcFile, SrcHash) in enumerate (HashChainList):
+            if SrcFile in GlobalData.gFileHashDict:
+                DestHash = GlobalData.gFileHashDict[SrcFile]
+            else:
+                try:
+                    with open(LongFilePath(SrcFile), 'rb') as f:
+                        Content = f.read()
+                        DestHash = hashlib.md5(Content).hexdigest()
+                        GlobalData.gFileHashDict[SrcFile] = DestHash
+                except IOError as X:
+                    # cache miss if SrcFile has been removed in the new version of the code
+                    GlobalData.gFileHashDict[SrcFile] = 0
+                    EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
+                    return False
+            if SrcHash != DestHash:
+                EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
+                return False
+
+        return True
+
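CheckHashChainFile replays a recorded chain: it re-hashes each listed file (memoized in gFileHashDict) and declares a miss on the first mismatch or missing file. Stripped of logging and globals, the check reduces to this sketch:

    import hashlib, json

    def verify_hashchain(chain_file, memo={}):
        with open(chain_file) as f:
            chain = json.load(f)
        for src, recorded in chain:
            if src not in memo:
                try:
                    with open(src, 'rb') as f:
                        memo[src] = hashlib.md5(f.read()).hexdigest()
                except IOError:
                    return False          # file gone: cache miss
            if memo[src] != recorded:
                return False              # content changed: cache miss
        return True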
-    ## Decide whether we can skip the ModuleAutoGen process
-    def CanSkipbyHash(self):
-        # Hashing feature is off
-        if not GlobalData.gUseHashCache:
+    ## Decide whether we can skip the remaining autogen and make process
+    def CanSkipbyMakeCache(self):
+        # For --binary-source only
+        # CanSkipbyMakeCache consumes the dicts below:
+        #     gModuleMakeCacheStatus
+        #     gHashChainStatus
+        # CanSkipbyMakeCache produces the gModuleMakeCacheStatus and gModuleHashFile dicts.
+        # All these dicts might be produced in multiprocessing, so the
+        # remote dicts need to be checked.
+
+        if not GlobalData.gBinCacheSource:
             return False
 
-        # Initialize a dictionary for each arch type
-        if self.Arch not in GlobalData.gBuildHashSkipTracking:
-            GlobalData.gBuildHashSkipTracking[self.Arch] = dict()
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:
+            return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]
 
-        # If library or Module is binary do not skip by hash
+        # If the Module is binary, which has a special build rule, do not skip by cache.
         if self.IsBinaryModule:
+            print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
+            GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
             return False
 
-        # .inc is contains binary information so do not skip by hash as well
+        # Treat .inc as a binary file; do not skip by hash either
         for f_ext in self.SourceFileList:
             if '.inc' in str(f_ext):
+                print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
+                GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
                 return False
 
-        # Use Cache, if exists and if Module has a copy in cache
-        if GlobalData.gBinCacheSource and self.AttemptModuleCacheCopy():
+        ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
+        FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
+
+        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
+        ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
+        try:
+            with open(LongFilePath(ModuleHashPair), 'r') as f:
+                ModuleHashPairList = json.load(f)
+        except:
+            # The ModuleHashPair might not exist for a newly added module
+            GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+            EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
+            print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
+            return False
+
+        # Check the PreMakeHash in ModuleHashPairList one by one
+        for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
+            SourceHashDir = path.join(ModuleCacheDir, MakeHash)
+            SourceFfsHashDir = path.join(FfsDir, MakeHash)
+            PreMakeHashFileList_FilePath = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
+            MakeHashFileList_FilePath = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)
+
+            try:
+                with open(LongFilePath(MakeHashFileList_FilePath), 'r') as f:
+                    MakeHashFileList = json.load(f)
+            except:
+                EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePath)
+                continue
+
+            HashMiss = False
+            for HashChainFile in MakeHashFileList:
+                HashChainStatus = None
+                if HashChainFile in GlobalData.gHashChainStatus:
+                    HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
+                if HashChainStatus == False:
+                    HashMiss = True
+                    break
+                elif HashChainStatus == True:
+                    continue
+                # Convert to a path starting with the cache source dir
+                RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
+                NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
+                if self.CheckHashChainFile(NewFilePath):
+                    GlobalData.gHashChainStatus[HashChainFile] = True
+                    # Save the module's own HashFile for later GenPreMakefileHashList usage
+                    if self.Name + ".hashchain." in HashChainFile:
+                        GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
+                else:
+                    GlobalData.gHashChainStatus[HashChainFile] = False
+                    HashMiss = True
+                    break
+
+            if HashMiss:
+                continue
+
+            # Makefile cache hit, restore the module build result
+            for root, dir, files in os.walk(SourceHashDir):
+                for f in files:
+                    File = path.join(root, f)
+                    self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
+            if os.path.exists(SourceFfsHashDir):
+                for root, dir, files in os.walk(SourceFfsHashDir):
+                    for f in files:
+                        File = path.join(root, f)
+                        self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)
+
+            if self.Name == "PcdPeim" or self.Name == "PcdDxe":
+                CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
+
+            print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)
+            GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
             return True
 
-        # Early exit for libraries that haven't yet finished building
-        HashFile = path.join(self.BuildDir, self.Name + ".hash")
-        if self.IsLibrary and not os.path.exists(HashFile):
+        print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
+        GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
         return False
 
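CanSkipbyMakeCache walks the recorded (PreMakeHash, MakeHash) pairs, verifies every hashchain file in the MakeHashFileList, and only on a full match restores BuildDir and FfsOutputDir from the cache. The control flow, reduced to its skeleton (a sketch, reusing verify_hashchain from the previous note):

    def try_restore(pairs, load_filelist, verify, restore):
        for premake_hash, make_hash in pairs:      # newest first
            filelist = load_filelist(make_hash)
            if filelist is None:
                continue                           # corrupt entry: try the next
            if all(verify(f) for f in filelist):
                restore(make_hash)                 # copy cached outputs back
                return True                        # cache hit
        return False                               # cache miss: full build

The result is memoized in gModuleMakeCacheStatus, so the decision for a given (module, arch) is made only once per build.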
+    ## Decide whether we can skip the remaining autogen and make process
+    def CanSkipbyPreMakeCache(self):
+        # CanSkipbyPreMakeCache consumes the dicts below:
+        #     gModulePreMakeCacheStatus
+        #     gHashChainStatus
+        #     gModuleHashFile
+        # CanSkipbyPreMakeCache produces the gModulePreMakeCacheStatus dict.
+        # All these dicts might be produced in multiprocessing, so the
+        # remote dicts need to be checked.
+
+        if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:
             return False
 
-        # Return a Boolean based on if can skip by hash, either from memory or from IO.
-        if self.Name not in GlobalData.gBuildHashSkipTracking[self.Arch]:
-            # If hashes are the same, SaveFileOnChange() will return False.
-            GlobalData.gBuildHashSkipTracking[self.Arch][self.Name] = not SaveFileOnChange(HashFile, self.GenModuleHash(), True)
-            return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]
-        else:
-            return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:
+            return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]
+
+        # If the Module is binary, which has a special build rule, do not skip by cache.
+        if self.IsBinaryModule:
+            print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
+            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+            return False
+
+        # Treat .inc as a binary file; do not skip by hash either
+        for f_ext in self.SourceFileList:
+            if '.inc' in str(f_ext):
+                print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
+                GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+                return False
+
+        # For --hash only in the incremental build
+        if not GlobalData.gBinCacheSource:
+            Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]
+            PreMakeHashFileList_FilePath = None
+            MakeTimeStamp = 0
+            # Find the latest PreMakeHashFileList file in the self.BuildDir folder
+            for File in Files:
+                if ".PreMakeHashFileList." in File:
+                    FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]
+                    if FileTimeStamp > MakeTimeStamp:
+                        MakeTimeStamp = FileTimeStamp
+                        PreMakeHashFileList_FilePath = File
+            if not PreMakeHashFileList_FilePath:
+                GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+                return False
+
+            try:
+                with open(LongFilePath(PreMakeHashFileList_FilePath), 'r') as f:
+                    PreMakeHashFileList = json.load(f)
+            except:
+                EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePath)
+                print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+                GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+                return False
+
+            HashMiss = False
+            for HashChainFile in PreMakeHashFileList:
+                HashChainStatus = None
+                if HashChainFile in GlobalData.gHashChainStatus:
+                    HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
+                if HashChainStatus == False:
+                    HashMiss = True
+                    break
+                elif HashChainStatus == True:
+                    continue
+                if self.CheckHashChainFile(HashChainFile):
+                    GlobalData.gHashChainStatus[HashChainFile] = True
+                    # Save the module's own HashFile for later GenPreMakefileHashList usage
+                    if self.Name + ".hashchain." in HashChainFile:
+                        GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
+                else:
+                    GlobalData.gHashChainStatus[HashChainFile] = False
+                    HashMiss = True
+                    break
+
+            if HashMiss:
+                print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+                GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+                return False
+            else:
+                print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+                GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
+                return True
+
+        ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
+        FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
+
+        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
+        ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
+        try:
+            with open(LongFilePath(ModuleHashPair), 'r') as f:
+                ModuleHashPairList = json.load(f)
+        except:
+            # The ModuleHashPair might not exist for a newly added module
+            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+            EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
+            print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+            return False
+
+        # Check the PreMakeHash in ModuleHashPairList one by one
+        for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
+            SourceHashDir = path.join(ModuleCacheDir, MakeHash)
+            SourceFfsHashDir = path.join(FfsDir, MakeHash)
+            PreMakeHashFileList_FilePath = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
+            MakeHashFileList_FilePath = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)
+
+            try:
+                with open(LongFilePath(PreMakeHashFileList_FilePath), 'r') as f:
+                    PreMakeHashFileList = json.load(f)
+            except:
+                EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePath)
+                continue
+
+            HashMiss = False
+            for HashChainFile in PreMakeHashFileList:
+                HashChainStatus = None
+                if HashChainFile in GlobalData.gHashChainStatus:
+                    HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
+                if HashChainStatus == False:
+                    HashMiss = True
+                    break
+                elif HashChainStatus == True:
+                    continue
+                # Convert to a path starting with the cache source dir
+                RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
+                NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
+                if self.CheckHashChainFile(NewFilePath):
+                    GlobalData.gHashChainStatus[HashChainFile] = True
+                else:
+                    GlobalData.gHashChainStatus[HashChainFile] = False
+                    HashMiss = True
+                    break
+
+            if HashMiss:
+                continue
+
+            # PreMakefile cache hit, restore the module build result
+            for root, dir, files in os.walk(SourceHashDir):
+                for f in files:
+                    File = path.join(root, f)
+                    self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
+            if os.path.exists(SourceFfsHashDir):
+                for root, dir, files in os.walk(SourceFfsHashDir):
+                    for f in files:
+                        File = path.join(root, f)
+                        self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)
+
+            if self.Name == "PcdPeim" or self.Name == "PcdDxe":
+                CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
+
+            print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
+            return True
+
+        print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+        GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+        return False
+
+    ## Decide whether we can skip the Module build
+    def CanSkipbyCache(self, gHitSet):
+        # Binary cache feature is off
+        if not GlobalData.gBinCacheSource:
+            return False
+
+        if self in gHitSet:
+            return True
+
+        return False
 
     ## Decide whether we can skip the ModuleAutoGen process
     #  If any source file is newer than the module than we cannot skip
     #
     def CanSkip(self):
+        # Don't skip if a cache feature is enabled
+        if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
+            return False
         if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
             return True
         if not os.path.exists(self.TimeStampPath):
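Taken together, the new scheme splits the old single CanSkipbyHash into two stages: CanSkipbyPreMakeCache (before autogen, keyed by source and meta-file hashes) and CanSkipbyMakeCache (before make, keyed additionally by autogen output and makefile hashes), with CanSkipbyCache as a final cheap membership test against the hit set collected earlier. A usage sketch of how a driver might consult them in order (hypothetical driver code, not part of this patch):

    def should_skip(module, hit_set):
        # Cheapest test first: module already proven restorable this build.
        if module.CanSkipbyCache(hit_set):
            return True
        # Otherwise try the pre-make cache, then the make cache.
        return module.CanSkipbyPreMakeCache() or module.CanSkipbyMakeCache()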