BZ:https://bugzilla.tianocore.org/show_bug.cgi?id=2079
The BaseTools CopyFileOnChange() and SaveFileOnChange()
functions may occasionally raise an IOError when building
on Windows with multi-process and build cache enabled.
CopyFileOnChange() and SaveFileOnChange() can be invoked
in multiple sub-processes simultaneously; this patch adds
global locks to synchronize their invocations, which
hardens their reliability.
Cc: Liming Gao <liming.gao@intel.com>
Cc: Bob Feng <bob.c.feng@intel.com>
Signed-off-by: Steven Shi <steven.shi@intel.com>
Reviewed-by: Bob Feng <bob.c.feng@intel.com>
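For illustration, the synchronization pattern this patch introduces can be
sketched as follows. This is a minimal, hypothetical example (assuming
Python 3; names such as save_file_on_change() and worker() are illustrative
and not the BaseTools API): a single multiprocessing.Lock is created in the
parent build process, passed to every worker process, and held around the
whole write-temp-file-then-rename sequence so that concurrent workers cannot
collide on the same destination file.

    # Minimal sketch of the locking pattern, not the actual BaseTools code.
    import multiprocessing as mp
    import os
    import tempfile

    def save_file_on_change(path, content, file_lock=None):
        # Serialize the whole check/write/rename sequence across processes.
        if file_lock:
            file_lock.acquire()
        try:
            if os.path.exists(path):
                with open(path) as fd:
                    if fd.read() == content:
                        return False        # content unchanged, nothing to do
            # Write to a temp file in the destination directory, then rename
            # it into place; the rename is atomic on the same filesystem.
            with tempfile.NamedTemporaryFile('w', dir=os.path.dirname(path) or '.',
                                             delete=False) as tf:
                tf.write(content)
                tempname = tf.name
            os.replace(tempname, path)
            return True
        finally:
            if file_lock:
                file_lock.release()

    def worker(lock, path, text):
        save_file_on_change(path, text, lock)

    if __name__ == '__main__':
        lock = mp.Lock()                    # one lock shared by all workers
        procs = [mp.Process(target=worker, args=(lock, 'out.txt', 'hello\n'))
                 for _ in range(4)]
        for p in procs:
            p.start()
        for p in procs:
            p.join()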
def kill(self):\r
self.feedback_q.put(None)\r
class AutoGenWorkerInProcess(mp.Process):\r
- def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock, share_data,log_q,error_event):\r
+ def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock,cache_lock,share_data,log_q,error_event):\r
mp.Process.__init__(self)\r
self.module_queue = module_queue\r
self.data_pipe_file_path =data_pipe_file_path\r
self.feedback_q = feedback_q\r
self.PlatformMetaFileSet = {}\r
self.file_lock = file_lock\r
+ self.cache_lock = cache_lock\r
self.share_data = share_data\r
self.log_q = log_q\r
self.error_event = error_event\r
GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")\r
GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")\r
GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")\r
- GlobalData.gCacheIR = self.data_pipe.Get("CacheIR")\r
+ GlobalData.gCacheIR = self.share_data\r
GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")\r
GlobalData.file_lock = self.file_lock\r
+ GlobalData.cache_lock = self.cache_lock\r
CommandTarget = self.data_pipe.Get("CommandTarget")\r
pcd_from_build_option = []\r
for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):\r
self.MakeHashDigest = None\r
self.MakeHashHexDigest = None\r
self.MakeHashChain = []\r
+ self.CacheCrash = False\r
self.PreMakeCacheHit = False\r
self.MakeCacheHit = False\r
\r
self.DataContainer = {"BinCacheDest":GlobalData.gBinCacheDest}\r
\r
- self.DataContainer = {"CacheIR":GlobalData.gCacheIR}\r
-\r
self.DataContainer = {"EnableGenfdsMultiThread":GlobalData.gEnableGenfdsMultiThread}\r
\ No newline at end of file
self.DataContainer = {"EnableGenfdsMultiThread":GlobalData.gEnableGenfdsMultiThread}\r
\ No newline at end of file
from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r
from AutoGen.CacheIR import ModuleBuildCacheIR\r
import json\r
\r
## Mapping Makefile type\r
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}\r
try:\r
ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
if os.path.exists(ModuleHashPair):\r
- f = open(ModuleHashPair, 'r')\r
- ModuleHashPairList = json.load(f)\r
- f.close()\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
ModuleHashPairList.append((PreMakeHash, MakeHash))\r
\r
if os.path.exists (self.TimeStampPath):\r
os.remove (self.TimeStampPath)\r
- with open(self.TimeStampPath, 'w+') as fd:\r
+ with tempfile.NamedTemporaryFile('w+', dir=os.path.dirname(self.TimeStampPath), delete=False) as tf:\r
- fd.write(f)\r
- fd.write("\n")\r
+ tf.write(f)\r
+ tf.write("\n")\r
+ tempname = tf.name\r
+ SaveFileOnChange(self.TimeStampPath, tempname, False)\r
\r
# Ignore generating makefile when it is a binary module\r
if self.IsBinaryModule:\r
MewIR.MakefilePath = MakefilePath\r
MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
MewIR.CreateMakeFileDone = True\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
try:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.MakefilePath = MakefilePath\r
self.IsCodeFileCreated = True\r
MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
MewIR.CreateCodeFileDone = True\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
try:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.CreateCodeFileDone = True\r
m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
\r
# Add Module self\r
- f = open(str(self.MetaFile), 'rb')\r
- Content = f.read()\r
- f.close()\r
+ with open(str(self.MetaFile), 'rb') as f:\r
+ Content = f.read()\r
m.update(Content)\r
\r
# Add Module's source files\r
if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:\r
return gDict[(self.MetaFile.Path, self.Arch)]\r
\r
+ # skip if the module cache already crashed\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
DependencyFileSet = set()\r
# Add Module Meta file\r
DependencyFileSet.add(self.MetaFile)\r
if not os.path.exists(str(File)):\r
EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
continue\r
- f = open(str(File), 'rb')\r
- Content = f.read()\r
- f.close()\r
+ with open(str(File), 'rb') as f:\r
+ Content = f.read()\r
m.update(Content)\r
FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
\r
MewIR.ModuleFilesHashDigest = m.digest()\r
MewIR.ModuleFilesHashHexDigest = m.hexdigest()\r
MewIR.ModuleFilesChain = FileList\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
try:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.ModuleFilesHashDigest = m.digest()\r
gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
return gDict[(self.MetaFile.Path, self.Arch)]\r
\r
+ # skip if the module cache already crashed\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
# skip binary module\r
if self.IsBinaryModule:\r
return\r
# Add Module self\r
m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)\r
\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.PreMakefileHashHexDigest = m.hexdigest()\r
gDict[(self.MetaFile.Path, self.Arch)] = IR\r
gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
return gDict[(self.MetaFile.Path, self.Arch)]\r
\r
+ # skip if the module cache already crashed\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
# skip binary module\r
if self.IsBinaryModule:\r
return\r
m.update(Content)\r
FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.AutoGenFileList = self.AutoGenFileList.keys()\r
IR.MakeHeaderFilesHashChain = FileList\r
gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:\r
return gDict[(self.MetaFile.Path, self.Arch)]\r
\r
+ # skip if the module cache already crashed\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
# skip binary module\r
if self.IsBinaryModule:\r
return\r
New.sort(key=lambda x: str(x))\r
MakeHashChain += New\r
\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.MakeHashDigest = m.digest()\r
IR.MakeHashHexDigest = m.hexdigest()\r
if not GlobalData.gBinCacheSource:\r
return False\r
\r
+ if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:\r
+ return True\r
+\r
+ if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return False\r
+\r
# If Module is binary, do not skip by cache\r
if self.IsBinaryModule:\r
return False\r
ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
if not os.path.exists(ModuleHashPair):\r
EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
+ with GlobalData.cache_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.CacheCrash = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- f = open(ModuleHashPair, 'r')\r
- ModuleHashPairList = json.load(f)\r
- f.close()\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
except:\r
EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
return False\r
if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
\r
if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.PreMakeCacheHit = True\r
gDict[(self.MetaFile.Path, self.Arch)] = IR\r
if not GlobalData.gBinCacheSource:\r
return False\r
\r
+ if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
+ return True\r
+\r
+ if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return False\r
+\r
# If Module is binary, do not skip by cache\r
if self.IsBinaryModule:\r
print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)\r
# .inc is contains binary information so do not skip by hash as well\r
for f_ext in self.SourceFileList:\r
if '.inc' in str(f_ext):\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.MakeCacheHit = False\r
gDict[(self.MetaFile.Path, self.Arch)] = IR\r
ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
if not os.path.exists(ModuleHashPair):\r
EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
+ with GlobalData.cache_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.CacheCrash = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
- f = open(ModuleHashPair, 'r')\r
- ModuleHashPairList = json.load(f)\r
- f.close()\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
except:\r
EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
return False\r
\r
if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.MakeCacheHit = True\r
gDict[(self.MetaFile.Path, self.Arch)] = IR\r
if not GlobalData.gBinCacheSource:\r
return\r
\r
+ # skip if the module cache already crashed\r
+ if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
# skip binary module\r
if self.IsBinaryModule:\r
return\r
- f = open(ModuleHashPair, 'r')\r
- ModuleHashPairList = json.load(f)\r
- f.close()\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
except:\r
EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
return\r
\r
# Common dictionary to share module cache intermediate result and state\r
gCacheIR = None\r
+# Common lock for the module cache intermediate data\r
+cache_lock = None\r
# Common lock for the file access in multiple process AutoGens\r
file_lock = None\r
# Common dictionary to share platform libraries' constant Pcd\r
# @retval True If the file content is changed and the file is renewed\r
# @retval False If the file content is the same\r
#\r
-def SaveFileOnChange(File, Content, IsBinaryFile=True):\r
+def SaveFileOnChange(File, Content, IsBinaryFile=True, FileLock=None):\r
\r
if os.path.exists(File):\r
if IsBinaryFile:\r
if IsBinaryFile:\r
OpenMode = "wb"\r
\r
+ # use default file_lock if no input new lock\r
+ if not FileLock:\r
+ FileLock = GlobalData.file_lock\r
+ if FileLock:\r
+ FileLock.acquire()\r
+\r
+\r
if GlobalData.gIsWindows and not os.path.exists(File):\r
# write temp file, then rename the temp file to the real file\r
# to make sure the file be immediate saved to disk\r
tempname = tf.name\r
try:\r
os.rename(tempname, File)\r
- except:\r
- EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)\r
+ except IOError as X:\r
+ if GlobalData.gBinCacheSource:\r
+ EdkLogger.quiet("[cache error]:fails to save file with error: %s" % (X))\r
+ else:\r
+ EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)\r
+ finally:\r
+ if FileLock:\r
+ FileLock.release()\r
else:\r
try:\r
with open(File, OpenMode) as Fd:\r
Fd.write(Content)\r
except IOError as X:\r
- EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)\r
+ if GlobalData.gBinCacheSource:\r
+ EdkLogger.quiet("[cache error]:fails to save file with error: %s" % (X))\r
+ else:\r
+ EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)\r
+ finally:\r
+ if FileLock:\r
+ FileLock.release()\r
# @retval True The two files content are different and the file is copied\r
# @retval False No copy really happen\r
#\r
-def CopyFileOnChange(SrcFile, Dst):\r
+def CopyFileOnChange(SrcFile, Dst, FileLock=None):\r
if not os.path.exists(SrcFile):\r
return False\r
\r
if not os.access(DirName, os.W_OK):\r
EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)\r
\r
+ # use default file_lock if no input new lock\r
+ if not FileLock:\r
+ FileLock = GlobalData.file_lock\r
+ if FileLock:\r
+ FileLock.acquire()\r
+\r
# os.replace and os.rename are the atomic operations in python 3 and 2.\r
# we use these two atomic operations to ensure the file copy is atomic:\r
# copy the src to a temp file in the dst same folder firstly, then\r
if GlobalData.gIsWindows and os.path.exists(DstFile):\r
os.remove(DstFile)\r
os.rename(tempname, DstFile)\r
- EdkLogger.error(None, FILE_COPY_FAILURE, ExtraData='IOError %s' % X)\r
+ if GlobalData.gBinCacheSource:\r
+ EdkLogger.quiet("[cache error]:fails to copy file with error: %s" % (X))\r
+ else:\r
+ EdkLogger.error(None, FILE_COPY_FAILURE, ExtraData='IOError %s' % X)\r
+ finally:\r
+ if FileLock:\r
+ FileLock.release()\r
file_lock = mp.Lock()\r
error_event = mp.Event()\r
GlobalData.file_lock = file_lock\r
+ cache_lock = mp.Lock()\r
+ GlobalData.cache_lock = cache_lock\r
FfsCmd = DataPipe.Get("FfsCommand")\r
if FfsCmd is None:\r
FfsCmd = {}\r
GlobalData.FfsCmd = FfsCmd\r
GlobalData.libConstPcd = DataPipe.Get("LibConstPcd")\r
GlobalData.Refes = DataPipe.Get("REFS")\r
FfsCmd = DataPipe.Get("FfsCommand")\r
if FfsCmd is None:\r
FfsCmd = {}\r
GlobalData.FfsCmd = FfsCmd\r
GlobalData.libConstPcd = DataPipe.Get("LibConstPcd")\r
GlobalData.Refes = DataPipe.Get("REFS")\r
- auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,file_lock,share_data,self.log_q,error_event) for _ in range(self.ThreadNumber)]\r
+ auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,file_lock,cache_lock,share_data,self.log_q,error_event) for _ in range(self.ThreadNumber)]\r
self.AutoGenMgr = AutoGenManager(auto_workers,feedback_q,error_event)\r
self.AutoGenMgr.start()\r
for w in auto_workers:\r
for PkgName in GlobalData.gPackageHash.keys():\r
GlobalData.gCacheIR[(PkgName, 'PackageHash')] = GlobalData.gPackageHash[PkgName]\r
GlobalData.file_lock = mp.Lock()\r
+ GlobalData.cache_lock = mp.Lock()\r
GlobalData.FfsCmd = CmdListDict\r
\r
self.Progress.Stop("done!")\r