From 94459080c118049aba927ec0444ba5b750b7d2c9 Mon Sep 17 00:00:00 2001
From: "Shi, Steven"
Date: Thu, 15 Aug 2019 22:26:21 +0800
Subject: [PATCH] BaseTools: Improve the file saving and copying reliability

BZ: https://bugzilla.tianocore.org/show_bug.cgi?id=2079

The BaseTools CopyFileOnChange() and SaveFileOnChange() functions may
occasionally raise IOError when building on Windows with multi-process
and build cache enabled. CopyFileOnChange() and SaveFileOnChange() can
be invoked by multiple sub-processes simultaneously; this patch adds
global locks to synchronize those invocations, which hardens the
reliability of both functions.

Cc: Liming Gao
Cc: Bob Feng
Signed-off-by: Steven Shi
Reviewed-by: Bob Feng
---
 .../Source/Python/AutoGen/AutoGenWorker.py    |   6 +-
 BaseTools/Source/Python/AutoGen/CacheIR.py    |   1 +
 BaseTools/Source/Python/AutoGen/DataPipe.py   |   2 -
 BaseTools/Source/Python/AutoGen/GenC.py       |   0
 .../Source/Python/AutoGen/ModuleAutoGen.py    | 101 ++++++++++++------
 BaseTools/Source/Python/Common/GlobalData.py  |   2 +
 BaseTools/Source/Python/Common/Misc.py        |  44 ++++++--
 BaseTools/Source/Python/build/build.py        |   5 +-
 8 files changed, 119 insertions(+), 42 deletions(-)
 mode change 100644 => 100755 BaseTools/Source/Python/AutoGen/GenC.py
 mode change 100644 => 100755 BaseTools/Source/Python/Common/Misc.py

diff --git a/BaseTools/Source/Python/AutoGen/AutoGenWorker.py b/BaseTools/Source/Python/AutoGen/AutoGenWorker.py
index 30d2f96fc7..2e68538b1c 100755
--- a/BaseTools/Source/Python/AutoGen/AutoGenWorker.py
+++ b/BaseTools/Source/Python/AutoGen/AutoGenWorker.py
@@ -133,7 +133,7 @@ class AutoGenManager(threading.Thread):
     def kill(self):
         self.feedback_q.put(None)
 class AutoGenWorkerInProcess(mp.Process):
-    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock, share_data,log_q,error_event):
+    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock,cache_lock,share_data,log_q,error_event):
         mp.Process.__init__(self)
         self.module_queue = module_queue
         self.data_pipe_file_path =data_pipe_file_path
@@ -141,6 +141,7 @@ class AutoGenWorkerInProcess(mp.Process):
         self.feedback_q = feedback_q
         self.PlatformMetaFileSet = {}
         self.file_lock = file_lock
+        self.cache_lock = cache_lock
         self.share_data = share_data
         self.log_q = log_q
         self.error_event = error_event
@@ -184,9 +185,10 @@ class AutoGenWorkerInProcess(mp.Process):
             GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
             GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
             GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
-            GlobalData.gCacheIR = self.data_pipe.Get("CacheIR")
+            GlobalData.gCacheIR = self.share_data
             GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
             GlobalData.file_lock = self.file_lock
+            GlobalData.cache_lock = self.cache_lock
             CommandTarget = self.data_pipe.Get("CommandTarget")
             pcd_from_build_option = []
             for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
diff --git a/BaseTools/Source/Python/AutoGen/CacheIR.py b/BaseTools/Source/Python/AutoGen/CacheIR.py
index 2d9ffe3f0b..715be5273c 100755
--- a/BaseTools/Source/Python/AutoGen/CacheIR.py
+++ b/BaseTools/Source/Python/AutoGen/CacheIR.py
@@ -24,5 +24,6 @@ class ModuleBuildCacheIR():
         self.MakeHashDigest = None
         self.MakeHashHexDigest = None
         self.MakeHashChain = []
+        self.CacheCrash = False
         self.PreMakeCacheHit = False
         self.MakeCacheHit = False
diff --git a/BaseTools/Source/Python/AutoGen/DataPipe.py b/BaseTools/Source/Python/AutoGen/DataPipe.py
index 87a1a125c8..2ca4f9ff4a 100755
--- a/BaseTools/Source/Python/AutoGen/DataPipe.py
+++ b/BaseTools/Source/Python/AutoGen/DataPipe.py
@@ -163,6 +163,4 @@ class MemoryDataPipe(DataPipe):
 
         self.DataContainer = {"BinCacheDest":GlobalData.gBinCacheDest}
 
-        self.DataContainer = {"CacheIR":GlobalData.gCacheIR}
-
         self.DataContainer = {"EnableGenfdsMultiThread":GlobalData.gEnableGenfdsMultiThread}
\ No newline at end of file
diff --git a/BaseTools/Source/Python/AutoGen/GenC.py b/BaseTools/Source/Python/AutoGen/GenC.py
old mode 100644
new mode 100755
diff --git a/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py b/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
index ffa4727e59..2cd0d3859e 100755
--- a/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
@@ -28,6 +28,7 @@ from Common.caching import cached_class_function
 from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo
 from AutoGen.CacheIR import ModuleBuildCacheIR
 import json
+import tempfile
 
 ## Mapping Makefile type
 gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
@@ -1702,9 +1703,8 @@ class ModuleAutoGen(AutoGen):
         try:
             ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
             if os.path.exists(ModuleHashPair):
-                f = open(ModuleHashPair, 'r')
-                ModuleHashPairList = json.load(f)
-                f.close()
+                with open(ModuleHashPair, 'r') as f:
+                    ModuleHashPairList = json.load(f)
             PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
             MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
             ModuleHashPairList.append((PreMakeHash, MakeHash))
@@ -1766,10 +1766,12 @@ class ModuleAutoGen(AutoGen):
 
         if os.path.exists (self.TimeStampPath):
             os.remove (self.TimeStampPath)
-        with open(self.TimeStampPath, 'w+') as fd:
+        with tempfile.NamedTemporaryFile('w+', dir=os.path.dirname(self.TimeStampPath), delete=False) as tf:
             for f in FileSet:
-                fd.write(f)
-                fd.write("\n")
+                tf.write(f)
+                tf.write("\n")
+            tempname = tf.name
+        SaveFileOnChange(self.TimeStampPath, tempname, False)
 
         # Ignore generating makefile when it is a binary module
         if self.IsBinaryModule:
@@ -1806,7 +1808,7 @@ class ModuleAutoGen(AutoGen):
             MewIR.MakefilePath = MakefilePath
             MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet
             MewIR.CreateMakeFileDone = True
-            with GlobalData.file_lock:
+            with GlobalData.cache_lock:
                 try:
                     IR = gDict[(self.MetaFile.Path, self.Arch)]
                     IR.MakefilePath = MakefilePath
@@ -1891,7 +1893,7 @@ class ModuleAutoGen(AutoGen):
         self.IsCodeFileCreated = True
         MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
         MewIR.CreateCodeFileDone = True
-        with GlobalData.file_lock:
+        with GlobalData.cache_lock:
             try:
                 IR = gDict[(self.MetaFile.Path, self.Arch)]
                 IR.CreateCodeFileDone = True
@@ -1951,9 +1953,8 @@ class ModuleAutoGen(AutoGen):
                 m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
 
         # Add Module self
-        f = open(str(self.MetaFile), 'rb')
-        Content = f.read()
-        f.close()
+        with open(str(self.MetaFile), 'rb') as f:
+            Content = f.read()
         m.update(Content)
 
         # Add Module's source files
@@ -1974,6 +1975,11 @@ class ModuleAutoGen(AutoGen):
         if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:
             return gDict[(self.MetaFile.Path, self.Arch)]
 
+        # skip if the module cache already crashed
+        if (self.MetaFile.Path, self.Arch) in gDict and \
+           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
+            return
+
         DependencyFileSet = set()
         # Add Module Meta file
         DependencyFileSet.add(self.MetaFile)
@@ -2021,9 +2027,8 @@ class ModuleAutoGen(AutoGen):
             if not os.path.exists(str(File)):
                 EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
                 continue
-            f = open(str(File), 'rb')
-            Content = f.read()
-            f.close()
+            with open(str(File), 'rb') as f:
+                Content = f.read()
             m.update(Content)
             FileList.append((str(File), hashlib.md5(Content).hexdigest()))
 
@@ -2032,7 +2037,7 @@ class ModuleAutoGen(AutoGen):
         MewIR.ModuleFilesHashDigest = m.digest()
         MewIR.ModuleFilesHashHexDigest = m.hexdigest()
         MewIR.ModuleFilesChain = FileList
-        with GlobalData.file_lock:
+        with GlobalData.cache_lock:
             try:
                 IR = gDict[(self.MetaFile.Path, self.Arch)]
                 IR.ModuleFilesHashDigest = m.digest()
@@ -2050,6 +2055,11 @@ class ModuleAutoGen(AutoGen):
            gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
             return gDict[(self.MetaFile.Path, self.Arch)]
 
+        # skip if the module cache already crashed
+        if (self.MetaFile.Path, self.Arch) in gDict and \
+           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
+            return
+
         # skip binary module
         if self.IsBinaryModule:
             return
@@ -2091,7 +2101,7 @@ class ModuleAutoGen(AutoGen):
         # Add Module self
         m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)
 
-        with GlobalData.file_lock:
+        with GlobalData.cache_lock:
             IR = gDict[(self.MetaFile.Path, self.Arch)]
             IR.PreMakefileHashHexDigest = m.hexdigest()
             gDict[(self.MetaFile.Path, self.Arch)] = IR
@@ -2104,6 +2114,11 @@ class ModuleAutoGen(AutoGen):
            gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
             return gDict[(self.MetaFile.Path, self.Arch)]
 
+        # skip if the module cache already crashed
+        if (self.MetaFile.Path, self.Arch) in gDict and \
+           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
+            return
+
         # skip binary module
         if self.IsBinaryModule:
             return
@@ -2159,7 +2174,7 @@ class ModuleAutoGen(AutoGen):
             m.update(Content)
             FileList.append((str(File), hashlib.md5(Content).hexdigest()))
 
-        with GlobalData.file_lock:
+        with GlobalData.cache_lock:
             IR = gDict[(self.MetaFile.Path, self.Arch)]
             IR.AutoGenFileList = self.AutoGenFileList.keys()
             IR.MakeHeaderFilesHashChain = FileList
@@ -2174,6 +2189,11 @@ class ModuleAutoGen(AutoGen):
            gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
             return gDict[(self.MetaFile.Path, self.Arch)]
 
+        # skip if the module cache already crashed
+        if (self.MetaFile.Path, self.Arch) in gDict and \
+           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
+            return
+
         # skip binary module
         if self.IsBinaryModule:
             return
@@ -2222,7 +2242,7 @@ class ModuleAutoGen(AutoGen):
                 New.sort(key=lambda x: str(x))
                 MakeHashChain += New
 
-        with GlobalData.file_lock:
+        with GlobalData.cache_lock:
             IR = gDict[(self.MetaFile.Path, self.Arch)]
             IR.MakeHashDigest = m.digest()
             IR.MakeHashHexDigest = m.hexdigest()
@@ -2236,6 +2256,12 @@ class ModuleAutoGen(AutoGen):
         if not GlobalData.gBinCacheSource:
             return False
 
+        if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:
+            return True
+
+        if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
+            return False
+
         # If Module is binary, do not skip by cache
         if self.IsBinaryModule:
             return False
@@ -2255,12 +2281,15 @@ class ModuleAutoGen(AutoGen):
         ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
         if not os.path.exists(ModuleHashPair):
             EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
+            with GlobalData.cache_lock:
+                IR = gDict[(self.MetaFile.Path, self.Arch)]
+                IR.CacheCrash = True
+                gDict[(self.MetaFile.Path, self.Arch)] = IR
             return False
 
         try:
-            f = open(ModuleHashPair, 'r')
-            ModuleHashPairList = json.load(f)
-            f.close()
+            with open(ModuleHashPair, 'r') as f:
+                ModuleHashPairList = json.load(f)
         except:
             EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
             return False
@@ -2300,7 +2329,7 @@ class ModuleAutoGen(AutoGen):
         if self.Name == "PcdPeim" or self.Name == "PcdDxe":
             CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
 
-        with GlobalData.file_lock:
+        with GlobalData.cache_lock:
             IR = gDict[(self.MetaFile.Path, self.Arch)]
             IR.PreMakeCacheHit = True
             gDict[(self.MetaFile.Path, self.Arch)] = IR
@@ -2313,6 +2342,12 @@ class ModuleAutoGen(AutoGen):
         if not GlobalData.gBinCacheSource:
             return False
 
+        if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
+            return True
+
+        if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
+            return False
+
         # If Module is binary, do not skip by cache
         if self.IsBinaryModule:
             print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)
@@ -2321,7 +2356,7 @@ class ModuleAutoGen(AutoGen):
         # .inc is contains binary information so do not skip by hash as well
         for f_ext in self.SourceFileList:
             if '.inc' in str(f_ext):
-                with GlobalData.file_lock:
+                with GlobalData.cache_lock:
                     IR = gDict[(self.MetaFile.Path, self.Arch)]
                     IR.MakeCacheHit = False
                     gDict[(self.MetaFile.Path, self.Arch)] = IR
@@ -2338,12 +2373,15 @@ class ModuleAutoGen(AutoGen):
         ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
         if not os.path.exists(ModuleHashPair):
             EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
+            with GlobalData.cache_lock:
+                IR = gDict[(self.MetaFile.Path, self.Arch)]
+                IR.CacheCrash = True
+                gDict[(self.MetaFile.Path, self.Arch)] = IR
             return False
 
         try:
-            f = open(ModuleHashPair, 'r')
-            ModuleHashPairList = json.load(f)
-            f.close()
+            with open(ModuleHashPair, 'r') as f:
+                ModuleHashPairList = json.load(f)
         except:
             EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
             return False
@@ -2383,7 +2421,7 @@ class ModuleAutoGen(AutoGen):
         if self.Name == "PcdPeim" or self.Name == "PcdDxe":
             CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
 
-        with GlobalData.file_lock:
+        with GlobalData.cache_lock:
             IR = gDict[(self.MetaFile.Path, self.Arch)]
             IR.MakeCacheHit = True
             gDict[(self.MetaFile.Path, self.Arch)] = IR
@@ -2395,6 +2433,10 @@ class ModuleAutoGen(AutoGen):
         if not GlobalData.gBinCacheSource:
             return
 
+        # skip if the module cache already crashed
+        if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
+            return
+
         # skip binary module
         if self.IsBinaryModule:
             return
@@ -2420,9 +2462,8 @@ class ModuleAutoGen(AutoGen):
             return
 
         try:
-            f = open(ModuleHashPair, 'r')
-            ModuleHashPairList = json.load(f)
-            f.close()
+            with open(ModuleHashPair, 'r') as f:
+                ModuleHashPairList = json.load(f)
         except:
             EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
             return
diff --git a/BaseTools/Source/Python/Common/GlobalData.py b/BaseTools/Source/Python/Common/GlobalData.py
index 9ea835314a..61327ad8f1 100755
--- a/BaseTools/Source/Python/Common/GlobalData.py
+++ b/BaseTools/Source/Python/Common/GlobalData.py
@@ -122,6 +122,8 @@ gBuildHashSkipTracking = dict()
 
 # Common dictionary to share module cache intermediate result and state
 gCacheIR = None
+# Common lock for the module cache intermediate data
+cache_lock = None
 # Common lock for the file access in multiple process AutoGens
 file_lock = None
 # Common dictionary to share platform libraries' constant Pcd
diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py
old mode 100644
new mode 100755
index 554ec010dd..4799635cc4
--- a/BaseTools/Source/Python/Common/Misc.py
+++ b/BaseTools/Source/Python/Common/Misc.py
@@ -448,7 +448,7 @@ def RemoveDirectory(Directory, Recursively=False):
 #   @retval     True  If the file content is changed and the file is renewed
 #   @retval     False If the file content is the same
 #
-def SaveFileOnChange(File, Content, IsBinaryFile=True):
+def SaveFileOnChange(File, Content, IsBinaryFile=True, FileLock=None):
     if os.path.exists(File):
         if IsBinaryFile:
@@ -479,6 +479,13 @@ def SaveFileOnChange(File, Content, IsBinaryFile=True):
     if IsBinaryFile:
         OpenMode = "wb"
 
+    # use default file_lock if no input new lock
+    if not FileLock:
+        FileLock = GlobalData.file_lock
+    if FileLock:
+        FileLock.acquire()
+
+
     if GlobalData.gIsWindows and not os.path.exists(File):
         # write temp file, then rename the temp file to the real file
         # to make sure the file be immediate saved to disk
@@ -487,14 +494,26 @@ def SaveFileOnChange(File, Content, IsBinaryFile=True):
             tempname = tf.name
         try:
             os.rename(tempname, File)
-        except:
-            EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
+        except IOError as X:
+            if GlobalData.gBinCacheSource:
+                EdkLogger.quiet("[cache error]:fails to save file with error: %s" % (X))
+            else:
+                EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
+        finally:
+            if FileLock:
+                FileLock.release()
     else:
         try:
             with open(File, OpenMode) as Fd:
                 Fd.write(Content)
         except IOError as X:
-            EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
+            if GlobalData.gBinCacheSource:
+                EdkLogger.quiet("[cache error]:fails to save file with error: %s" % (X))
+            else:
+                EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
+        finally:
+            if FileLock:
+                FileLock.release()
 
     return True
@@ -510,7 +529,7 @@ def SaveFileOnChange(File, Content, IsBinaryFile=True):
 #   @retval     True  The two files content are different and the file is copied
 #   @retval     False No copy really happen
 #
-def CopyFileOnChange(SrcFile, Dst):
+def CopyFileOnChange(SrcFile, Dst, FileLock=None):
     if not os.path.exists(SrcFile):
         return False
 
@@ -531,6 +550,12 @@ def CopyFileOnChange(SrcFile, Dst):
     if not os.access(DirName, os.W_OK):
         EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)
 
+    # use default file_lock if no input new lock
+    if not FileLock:
+        FileLock = GlobalData.file_lock
+    if FileLock:
+        FileLock.acquire()
+
     # os.replace and os.rename are the atomic operations in python 3 and 2.
 # we use these two atomic operations to ensure the file copy is atomic:
 # copy the src to a temp file in the dst same folder firstly, then
@@ -546,9 +571,14 @@ def CopyFileOnChange(SrcFile, Dst):
             if GlobalData.gIsWindows and os.path.exists(DstFile):
                 os.remove(DstFile)
             os.rename(tempname, DstFile)
-
     except IOError as X:
-        EdkLogger.error(None, FILE_COPY_FAILURE, ExtraData='IOError %s' % X)
+        if GlobalData.gBinCacheSource:
+            EdkLogger.quiet("[cache error]:fails to copy file with error: %s" % (X))
+        else:
+            EdkLogger.error(None, FILE_COPY_FAILURE, ExtraData='IOError %s' % X)
+    finally:
+        if FileLock:
+            FileLock.release()
 
     return True
diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py
index 299fa64311..2c10670a69 100755
--- a/BaseTools/Source/Python/build/build.py
+++ b/BaseTools/Source/Python/build/build.py
@@ -820,13 +820,15 @@ class Build():
         file_lock = mp.Lock()
         error_event = mp.Event()
         GlobalData.file_lock = file_lock
+        cache_lock = mp.Lock()
+        GlobalData.cache_lock = cache_lock
         FfsCmd = DataPipe.Get("FfsCommand")
         if FfsCmd is None:
             FfsCmd = {}
         GlobalData.FfsCmd = FfsCmd
         GlobalData.libConstPcd = DataPipe.Get("LibConstPcd")
         GlobalData.Refes = DataPipe.Get("REFS")
-        auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,file_lock,share_data,self.log_q,error_event) for _ in range(self.ThreadNumber)]
+        auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,file_lock,cache_lock,share_data,self.log_q,error_event) for _ in range(self.ThreadNumber)]
         self.AutoGenMgr = AutoGenManager(auto_workers,feedback_q,error_event)
         self.AutoGenMgr.start()
         for w in auto_workers:
@@ -1826,6 +1828,7 @@ class Build():
             for PkgName in GlobalData.gPackageHash.keys():
                 GlobalData.gCacheIR[(PkgName, 'PackageHash')] = GlobalData.gPackageHash[PkgName]
             GlobalData.file_lock = mp.Lock()
+            GlobalData.cache_lock = mp.Lock()
             GlobalData.FfsCmd = CmdListDict
 
         self.Progress.Stop("done!")
-- 
2.39.2
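Note (illustrative only, not part of the patch): the synchronization
pattern described in the commit message, reduced to a minimal standalone
sketch. The helper name save_file_locked is hypothetical; GlobalData,
EdkLogger, and the content-comparison fast path are omitted, and
os.replace() (atomic on both POSIX and Windows in Python 3) stands in
for the patch's remove-then-os.rename() sequence on Windows.

    import multiprocessing as mp
    import os
    import tempfile

    def save_file_locked(path, content, lock):
        # Serialize writers across processes: only one worker may
        # replace 'path' at a time (the role of GlobalData.file_lock).
        lock.acquire()
        try:
            # Write to a temp file in the destination directory, then
            # atomically move it into place.
            with tempfile.NamedTemporaryFile('w', dir=os.path.dirname(path) or '.',
                                             delete=False) as tf:
                tf.write(content)
                tempname = tf.name
            os.replace(tempname, path)
        finally:
            lock.release()

    if __name__ == '__main__':
        lock = mp.Lock()  # shared with the worker processes in the real build
        save_file_locked('demo.txt', 'hello\n', lock)

Callers that already hold a dedicated lock can pass it via the new
FileLock parameter of SaveFileOnChange()/CopyFileOnChange(); when the
parameter is omitted, the functions fall back to GlobalData.file_lock.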