from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r
from AutoGen.CacheIR import ModuleBuildCacheIR\r
import json\r
+import tempfile\r
\r
## Mapping Makefile type\r
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}\r
def BuildCommand(self):\r
return self.PlatformInfo.BuildCommand\r
\r
- ## Get object list of all packages the module and its dependent libraries belong to\r
+ ## Get the packages of the module itself and the packages of the Platform\r
+ #\r
+ # @retval list The list of package object\r
+ #\r
+ @cached_property\r
+ def PackageList(self):\r
+ PkgList = []\r
+ if self.Module.Packages:\r
+ PkgList.extend(self.Module.Packages)\r
+ Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
+ for Package in Platform.Packages:\r
+ if Package in PkgList:\r
+ continue\r
+ PkgList.append(Package)\r
+ return PkgList\r
+\r
+ ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on\r
#\r
# @retval list The list of package object\r
#\r
@cached_property\r
def DerivedPackageList(self):\r
PackageList = []\r
- for M in [self.Module] + self.DependentLibraryList:\r
+ PackageList.extend(self.PackageList)\r
+ for M in self.DependentLibraryList:\r
for Package in M.Packages:\r
if Package in PackageList:\r
continue\r
self.Targets\r
return self._FileTypes\r
\r
- ## Get the list of package object the module depends on\r
+ ## Get the list of package object the module depends on and the Platform depends on\r
#\r
# @retval list The package object list\r
#\r
@cached_property\r
def DependentPackageList(self):\r
- return self.Module.Packages\r
+ return self.PackageList\r
\r
## Return the list of auto-generated code file\r
#\r
RetVal.append(self.MetaFile.Dir)\r
RetVal.append(self.DebugDir)\r
\r
- for Package in self.Module.Packages:\r
+ for Package in self.PackageList:\r
PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
if PackageDir not in RetVal:\r
RetVal.append(PackageDir)\r
for Inc in IncludesList:\r
if Inc not in RetVal:\r
RetVal.append(str(Inc))\r
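+ # Also pick up include directories passed directly through compiler FLAGS\r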
+ RetVal.extend(self.IncPathFromBuildOptions)\r
return RetVal\r
\r
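+ ## Get extra include paths passed to the compiler through the FLAGS build options\r
+ #\r
+ # Both "/I <path>" and "/I<path>" forms (and their "-I" equivalents) are recognized;\r
+ # only paths that exist on disk are returned.\r
+ #\r
+ # @retval list The list of include paths extracted from FLAGS\r
+ #\r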
+ @cached_property\r
+ def IncPathFromBuildOptions(self):\r
+ IncPathList = []\r
+ for tool in self.BuildOption:\r
+ if 'FLAGS' in self.BuildOption[tool]:\r
+ flags = self.BuildOption[tool]['FLAGS']\r
+ whitespace = False\r
+ for flag in flags.split(" "):\r
+ flag = flag.strip()\r
+ if flag.startswith(("/I","-I")):\r
+ if len(flag)>2:\r
+ # "/I<path>" or "-I<path>" with the path in the same token\r
+ if os.path.exists(flag[2:]):\r
+ IncPathList.append(flag[2:])\r
+ else:\r
+ # bare "/I" or "-I": the path comes in the next token\r
+ whitespace = True\r
+ continue\r
+ if whitespace and flag:\r
+ if os.path.exists(flag):\r
+ IncPathList.append(flag)\r
+ whitespace = False\r
+ return IncPathList\r
+\r
@cached_property\r
def IncludePathLength(self):\r
return sum(len(inc)+1 for inc in self.IncludePathList)\r
@cached_property\r
def PackageIncludePathList(self):\r
IncludesList = []\r
- for Package in self.Module.Packages:\r
+ for Package in self.PackageList:\r
PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
IncludesList = Package.Includes\r
if Package._PrivateIncludes:\r
@cached_property\r
def OutputFile(self):\r
retVal = set()\r
- OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r
- DebugDir = self.DebugDir.replace('\\', '/').strip('/')\r
- FfsOutputDir = self.FfsOutputDir.replace('\\', '/').rstrip('/')\r
- for Item in self.CodaTargetList:\r
- File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')\r
- retVal.add(File)\r
- if self.DepexGenerated:\r
- retVal.add(self.Name + '.depex')\r
-\r
- Bin = self._GenOffsetBin()\r
- if Bin:\r
- retVal.add(Bin)\r
\r
- for Root, Dirs, Files in os.walk(OutputDir):\r
+ for Root, Dirs, Files in os.walk(self.BuildDir):\r
for File in Files:\r
- if File.lower().endswith('.pdb'):\r
- retVal.add(File)\r
+ # Skip intermediate .obj and .debug files; collect all other files under BuildDir as outputs\r
+ if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):\r
+ NewFile = path.join(Root, File)\r
+ retVal.add(NewFile)\r
\r
- for Root, Dirs, Files in os.walk(FfsOutputDir):\r
+ for Root, Dirs, Files in os.walk(self.FfsOutputDir):\r
for File in Files:\r
- if File.lower().endswith('.ffs') or File.lower().endswith('.offset') or File.lower().endswith('.raw') \\r
- or File.lower().endswith('.raw.txt'):\r
- retVal.add(File)\r
+ NewFile = path.join(Root, File)\r
+ retVal.add(NewFile)\r
\r
return retVal\r
\r
\r
self.IsAsBuiltInfCreated = True\r
\r
- def CacheCopyFile(self, OriginDir, CopyDir, File):\r
- sub_dir = os.path.relpath(File, CopyDir)\r
- destination_file = os.path.join(OriginDir, sub_dir)\r
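+ ## Copy File into DestDir, preserving the file's path relative to SourceDir\r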
+ def CacheCopyFile(self, DestDir, SourceDir, File):\r
+ sub_dir = os.path.relpath(File, SourceDir)\r
+ destination_file = os.path.join(DestDir, sub_dir)\r
destination_dir = os.path.dirname(destination_file)\r
CreateDirectory(destination_dir)\r
try:\r
Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
self.OutputFile = Ma.Binaries\r
for File in self.OutputFile:\r
- File = str(File)\r
- if not os.path.isabs(File):\r
- NewFile = os.path.join(self.OutputDir, File)\r
- if not os.path.exists(NewFile):\r
- NewFile = os.path.join(self.FfsOutputDir, File)\r
- File = NewFile\r
if os.path.exists(File):\r
- if File.lower().endswith('.ffs') or File.lower().endswith('.offset') or File.lower().endswith('.raw') \\r
- or File.lower().endswith('.raw.txt'):\r
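+ # OutputFile paths are expected to be absolute here; files under FfsOutputDir go into the\r
+ # FFS cache directory, everything else into the module cache directory\r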
+ if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):\r
self.CacheCopyFile(FfsDir, self.FfsOutputDir, File)\r
else:\r
self.CacheCopyFile(FileDir, self.OutputDir, File)\r
try:\r
ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
if os.path.exists(ModuleHashPair):\r
- f = open(ModuleHashPair, 'r')\r
- ModuleHashPairList = json.load(f)\r
- f.close()\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
ModuleHashPairList.append((PreMakeHash, MakeHash))\r
\r
if os.path.exists (self.TimeStampPath):\r
os.remove (self.TimeStampPath)\r
- with open(self.TimeStampPath, 'w+') as fd:\r
- for f in FileSet:\r
- fd.write(f)\r
- fd.write("\n")\r
+\r
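+ # Write the whole dependency file list in a single SaveFileOnChange call\r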
+ SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)\r
\r
# Ignore generating makefile when it is a binary module\r
if self.IsBinaryModule:\r
MewIR.MakefilePath = MakefilePath\r
MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
MewIR.CreateMakeFileDone = True\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
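+ # cache_lock guards read-modify-write updates of the shared cache dictionary (gDict)\r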
try:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.MakefilePath = MakefilePath\r
# CanSkip uses timestamps to determine build skipping\r
if self.CanSkip():\r
return\r
-\r
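+ # Force evaluation of the cached LibraryAutoGenList property before generating files\r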
+ self.LibraryAutoGenList\r
AutoGenList = []\r
IgoredAutoGenList = []\r
\r
self.IsCodeFileCreated = True\r
MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
MewIR.CreateCodeFileDone = True\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
try:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.CreateCodeFileDone = True\r
m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
\r
# Add Module self\r
- f = open(str(self.MetaFile), 'rb')\r
- Content = f.read()\r
- f.close()\r
+ with open(str(self.MetaFile), 'rb') as f:\r
+ Content = f.read()\r
m.update(Content)\r
\r
# Add Module's source files\r
if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:\r
return gDict[(self.MetaFile.Path, self.Arch)]\r
\r
+ # skip if the module cache already crashed\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
DependencyFileSet = set()\r
# Add Module Meta file\r
DependencyFileSet.add(self.MetaFile)\r
if not os.path.exists(str(File)):\r
EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
continue\r
- f = open(str(File), 'rb')\r
- Content = f.read()\r
- f.close()\r
+ with open(str(File), 'rb') as f:\r
+ Content = f.read()\r
m.update(Content)\r
FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
\r
MewIR.ModuleFilesHashDigest = m.digest()\r
MewIR.ModuleFilesHashHexDigest = m.hexdigest()\r
MewIR.ModuleFilesChain = FileList\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
try:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.ModuleFilesHashDigest = m.digest()\r
gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
return gDict[(self.MetaFile.Path, self.Arch)]\r
\r
+ # skip if the module cache already crashed\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
# skip binary module\r
if self.IsBinaryModule:\r
return\r
\r
if not (self.MetaFile.Path, self.Arch) in gDict or \\r
not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
- EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
- return\r
+ EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+ return\r
\r
# Initialize hash object\r
m = hashlib.md5()\r
# Add Module self\r
m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)\r
\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.PreMakefileHashHexDigest = m.hexdigest()\r
gDict[(self.MetaFile.Path, self.Arch)] = IR\r
gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
return gDict[(self.MetaFile.Path, self.Arch)]\r
\r
+ # skip if the module cache already crashed\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
# skip binary module\r
if self.IsBinaryModule:\r
return\r
self.CreateCodeFile()\r
if not (self.MetaFile.Path, self.Arch) in gDict or \\r
not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
- self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.File, self.Arch),[]))\r
+ self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.Path, self.Arch),[]))\r
\r
if not (self.MetaFile.Path, self.Arch) in gDict or \\r
not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \\r
m.update(Content)\r
FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.AutoGenFileList = self.AutoGenFileList.keys()\r
IR.MakeHeaderFilesHashChain = FileList\r
gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:\r
return gDict[(self.MetaFile.Path, self.Arch)]\r
\r
+ # skip if the module cache already crashed\r
+ if (self.MetaFile.Path, self.Arch) in gDict and \\r
+ gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
# skip binary module\r
if self.IsBinaryModule:\r
return\r
New.sort(key=lambda x: str(x))\r
MakeHashChain += New\r
\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.MakeHashDigest = m.digest()\r
IR.MakeHashHexDigest = m.hexdigest()\r
if not GlobalData.gBinCacheSource:\r
return False\r
\r
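+ # Return early if a previous check already recorded a cache hit or a broken cache for this module\r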
+ if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:\r
+ return True\r
+\r
+ if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return False\r
+\r
# If Module is binary, do not skip by cache\r
if self.IsBinaryModule:\r
return False\r
ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
if not os.path.exists(ModuleHashPair):\r
EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
+ with GlobalData.cache_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.CacheCrash = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
return False\r
\r
try:\r
- f = open(ModuleHashPair, 'r')\r
- ModuleHashPairList = json.load(f)\r
- f.close()\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
except:\r
EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
return False\r
if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.PreMakeCacheHit = True\r
gDict[(self.MetaFile.Path, self.Arch)] = IR\r
if not GlobalData.gBinCacheSource:\r
return False\r
\r
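+ # Return early if a previous check already recorded a make cache hit or a broken cache for this module\r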
+ if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
+ return True\r
+\r
+ if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return False\r
+\r
# If Module is binary, do not skip by cache\r
if self.IsBinaryModule:\r
print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)\r
# .inc files contain binary information, so do not skip them based on hash either\r
for f_ext in self.SourceFileList:\r
if '.inc' in str(f_ext):\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.MakeCacheHit = False\r
gDict[(self.MetaFile.Path, self.Arch)] = IR\r
ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
if not os.path.exists(ModuleHashPair):\r
EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
+ with GlobalData.cache_lock:\r
+ IR = gDict[(self.MetaFile.Path, self.Arch)]\r
+ IR.CacheCrash = True\r
+ gDict[(self.MetaFile.Path, self.Arch)] = IR\r
return False\r
\r
try:\r
- f = open(ModuleHashPair, 'r')\r
- ModuleHashPairList = json.load(f)\r
- f.close()\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
except:\r
EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
return False\r
\r
if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
- with GlobalData.file_lock:\r
+ with GlobalData.cache_lock:\r
IR = gDict[(self.MetaFile.Path, self.Arch)]\r
IR.MakeCacheHit = True\r
gDict[(self.MetaFile.Path, self.Arch)] = IR\r
if not GlobalData.gBinCacheSource:\r
return\r
\r
+ # skip if the module cache already crashed\r
+ if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
+ return\r
+\r
# skip binary module\r
if self.IsBinaryModule:\r
return\r
return\r
\r
try:\r
- f = open(ModuleHashPair, 'r')\r
- ModuleHashPairList = json.load(f)\r
- f.close()\r
+ with open(ModuleHashPair, 'r') as f:\r
+ ModuleHashPairList = json.load(f)\r
except:\r
EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
return\r