# Copyright (c) 2018, Hewlett Packard Enterprise Development, L.P.
# Copyright (c) 2019, American Megatrends, Inc. All rights reserved.
#\r
-# This program and the accompanying materials\r
-# are licensed and made available under the terms and conditions of the BSD License\r
-# which accompanies this distribution. The full text of the license may be found at\r
-# http://opensource.org/licenses/bsd-license.php\r
-#\r
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+# SPDX-License-Identifier: BSD-2-Clause-Patent\r
#\r
\r
## Import Modules\r
\r
from .StrGather import *\r
from .BuildEngine import BuildRule\r
-\r
+import shutil\r
from Common.LongFilePathSupport import CopyLongFilePath\r
from Common.BuildToolError import *\r
from Common.DataType import *\r
## @AsBuilt${BEGIN}\r
## ${flags_item}${END}\r
""")\r
+## Split a command line option string into a list of options
+#
+# subprocess.Popen needs the args to be a sequence; otherwise there is a
+# problem launching the command on non-Windows platforms.
+#
+# @param  OptionString  Raw option string, e.g. '-a -b "quoted value"'
+#
+# @retval list          Substrings of OptionString, split at unquoted
+#                       option markers ('/' or '-') that follow whitespace
+#
+def _SplitOption(OptionString):
+    OptionList = []
+    LastChar = " "
+    OptionStart = 0
+    QuotationMark = ""
+    for Index in range(0, len(OptionString)):
+        CurrentChar = OptionString[Index]
+        if CurrentChar in ['"', "'"]:
+            # Toggle quoted state; only the same quote character that opened
+            # the region closes it
+            if QuotationMark == CurrentChar:
+                QuotationMark = ""
+            elif QuotationMark == "":
+                QuotationMark = CurrentChar
+            continue
+        elif QuotationMark:
+            # Inside a quoted region: '/' and '-' are not option markers
+            continue
+
+        if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]:
+            # A new option begins here; emit the previous one, excluding the
+            # single whitespace separator at Index - 1
+            if Index > OptionStart:
+                OptionList.append(OptionString[OptionStart:Index - 1])
+            OptionStart = Index
+        LastChar = CurrentChar
+    OptionList.append(OptionString[OptionStart:])
+    return OptionList
+\r
+#
+# Convert a string to a C format byte array string, e.g. '{65,66,0}'.
+#
+# Accepted inputs: an existing C array literal '{...}' (elements are
+# normalized to decimal), a quoted ASCII string '"..."' (one byte per
+# character), or a wide string 'L"..."' (one 16-bit value per character).
+# Returns None when the input does not match any recognized form or an
+# array element is not a valid integer.
+#
+def _ConvertStringToByteArray(Value):
+    Value = Value.strip()
+    if not Value:
+        return None
+    if Value[0] == '{':
+        # Already a C array literal: re-emit each element as decimal
+        if not Value.endswith('}'):
+            return None
+        Value = Value.replace(' ', '').replace('{', '').replace('}', '')
+        ValFields = Value.split(',')
+        try:
+            for Index in range(len(ValFields)):
+                ValFields[Index] = str(int(ValFields[Index], 0))
+        except ValueError:
+            return None
+        Value = '{' + ','.join(ValFields) + '}'
+        return Value
+
+    Unicode = False
+    if Value.startswith('L"'):
+        # Wide (L"...") string: drop the leading 'L', keep the quotes
+        if not Value.endswith('"'):
+            return None
+        Value = Value[1:]
+        Unicode = True
+    elif not Value.startswith('"') or not Value.endswith('"'):
+        return None
+
+    Value = eval(Value) # translate escape character
+    NewValue = '{'
+    for Index in range(0, len(Value)):
+        if Unicode:
+            # One 16-bit value per character for wide strings
+            NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
+        else:
+            # One byte per character for ASCII strings
+            NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
+    # Append the terminating NUL element
+    Value = NewValue + '0}'
+    return Value
\r
## Base class for AutoGen\r
#\r
RetVal = cls.__ObjectCache[Key] = super(AutoGen, cls).__new__(cls)\r
return RetVal\r
\r
- def __init__ (self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
- super(AutoGen, self).__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
\r
## hash() operator\r
#\r
# call super().__init__ then call the worker function with different parameter count\r
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
if not hasattr(self, "_Init"):\r
- super(WorkspaceAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
self._Init = True\r
\r
\r
# generate the SourcePcdDict and BinaryPcdDict\r
PGen = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)\r
- for BuildData in PGen.BuildDatabase._CACHE_.values():\r
+ for BuildData in list(PGen.BuildDatabase._CACHE_.values()):\r
if BuildData.Arch != Arch:\r
continue\r
if BuildData.MetaFile.Ext == '.inf':\r
if NewPcd2 not in GlobalData.MixedPcd[item]:\r
GlobalData.MixedPcd[item].append(NewPcd2)\r
\r
- for BuildData in PGen.BuildDatabase._CACHE_.values():\r
+ for BuildData in list(PGen.BuildDatabase._CACHE_.values()):\r
if BuildData.Arch != Arch:\r
continue\r
for key in BuildData.Pcds:\r
#\r
# Generate Package level hash value\r
#\r
- GlobalData.gPackageHash[Arch] = {}\r
+ GlobalData.gPackageHash = {}\r
if GlobalData.gUseHashCache:\r
for Pkg in Pkgs:\r
self._GenPkgLevelHash(Pkg)\r
#\r
content = 'gCommandLineDefines: '\r
content += str(GlobalData.gCommandLineDefines)\r
- content += os.linesep\r
+ content += TAB_LINE_BREAK\r
content += 'BuildOptionPcd: '\r
content += str(GlobalData.BuildOptionPcd)\r
- content += os.linesep\r
+ content += TAB_LINE_BREAK\r
content += 'Active Platform: '\r
content += str(self.Platform)\r
- content += os.linesep\r
+ content += TAB_LINE_BREAK\r
if self.FdfFile:\r
content += 'Flash Image Definition: '\r
content += str(self.FdfFile)\r
- content += os.linesep\r
+ content += TAB_LINE_BREAK\r
SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)\r
\r
#\r
if Pa.PcdTokenNumber:\r
if Pa.DynamicPcdList:\r
for Pcd in Pa.DynamicPcdList:\r
- PcdTokenNumber += os.linesep\r
+ PcdTokenNumber += TAB_LINE_BREAK\r
PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))\r
PcdTokenNumber += ' : '\r
PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName])\r
for files in AllWorkSpaceMetaFiles:\r
if files.endswith('.dec'):\r
continue\r
- f = open(files, 'r')\r
+ f = open(files, 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
- SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), True)\r
+ SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), False)\r
GlobalData.gPlatformHash = m.hexdigest()\r
\r
#\r
return True\r
\r
def _GenPkgLevelHash(self, Pkg):\r
- if Pkg.PackageName in GlobalData.gPackageHash[Pkg.Arch]:\r
+ if Pkg.PackageName in GlobalData.gPackageHash:\r
return\r
\r
PkgDir = os.path.join(self.BuildDir, Pkg.Arch, Pkg.PackageName)\r
HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')\r
m = hashlib.md5()\r
# Get .dec file's hash value\r
- f = open(Pkg.MetaFile.Path, 'r')\r
+ f = open(Pkg.MetaFile.Path, 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
for Root, Dirs, Files in os.walk(str(inc)):\r
for File in sorted(Files):\r
File_Path = os.path.join(Root, File)\r
- f = open(File_Path, 'r')\r
+ f = open(File_Path, 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
- SaveFileOnChange(HashFile, m.hexdigest(), True)\r
- GlobalData.gPackageHash[Pkg.Arch][Pkg.PackageName] = m.hexdigest()\r
+ SaveFileOnChange(HashFile, m.hexdigest(), False)\r
+ GlobalData.gPackageHash[Pkg.PackageName] = m.hexdigest()\r
\r
def _GetMetaFiles(self, Target, Toolchain, Arch):\r
AllWorkSpaceMetaFiles = set()\r
def _CheckAllPcdsTokenValueConflict(self):\r
for Pa in self.AutoGenObjectList:\r
for Package in Pa.PackageList:\r
- PcdList = Package.Pcds.values()\r
+ PcdList = list(Package.Pcds.values())\r
PcdList.sort(key=lambda x: int(x.TokenValue, 0))\r
Count = 0\r
while (Count < len(PcdList) - 1) :\r
Count += SameTokenValuePcdListCount\r
Count += 1\r
\r
- PcdList = Package.Pcds.values()\r
+ PcdList = list(Package.Pcds.values())\r
PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))\r
Count = 0\r
while (Count < len(PcdList) - 1) :\r
# call super().__init__ then call the worker function with different parameter count\r
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
if not hasattr(self, "_Init"):\r
- super(PlatformAutoGen, self).__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)\r
self._Init = True\r
#\r
self.BuildTarget = Target\r
self.Arch = Arch\r
self.SourceDir = PlatformFile.SubDir\r
- self.SourceOverrideDir = None\r
self.FdTargetList = self.Workspace.FdTargetList\r
self.FvTargetList = self.Workspace.FvTargetList\r
- self.AllPcdList = []\r
# get the original module/package/platform objects\r
self.BuildDatabase = Workspace.BuildDatabase\r
self.DscBuildDataObj = Workspace.Platform\r
\r
return True\r
\r
+    ## hash() operator of PlatformAutoGen
+    #
+    # The platform file path and arch string will be used to represent
+    # hash value of this object
+    #
+    # @retval int Hash value of the platform file path and arch
+    #
+    # NOTE(review): decorated with @cached_class_function, which presumably
+    # caches the first result per instance — confirm in the caching module.
+    @cached_class_function
+    def __hash__(self):
+        return hash((self.MetaFile, self.Arch))
+\r
@cached_class_function\r
def __repr__(self):\r
return "%s [%s]" % (self.MetaFile, self.Arch)\r
#\r
@cached_class_function\r
def CreateCodeFile(self, CreateModuleCodeFile=False):\r
- # only module has code to be greated, so do nothing if CreateModuleCodeFile is False\r
+ # only module has code to be created, so do nothing if CreateModuleCodeFile is False\r
if not CreateModuleCodeFile:\r
return\r
\r
\r
self.IsMakeFileCreated = True\r
\r
+    ## All PCDs of this platform: dynamic PCDs followed by non-dynamic PCDs
+    @property
+    def AllPcdList(self):
+        return self.DynamicPcdList + self.NonDynamicPcdList
## Deal with Shared FixedAtBuild Pcds\r
#\r
def CollectFixedAtBuildPcds(self):\r
if os.path.exists(VpdMapFilePath):\r
OrgVpdFile.Read(VpdMapFilePath)\r
PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])\r
- NvStoreOffset = PcdItems.values()[0].strip() if PcdItems else '0'\r
+ NvStoreOffset = list(PcdItems.values())[0].strip() if PcdItems else '0'\r
else:\r
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)\r
\r
if (self.Workspace.ArchList[-1] == self.Arch):\r
for Pcd in self._DynamicPcdList:\r
# just pick the a value to determine whether is unicode string type\r
- Sku = Pcd.SkuInfoList.values()[0]\r
+ Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)\r
Sku.VpdOffset = Sku.VpdOffset.strip()\r
\r
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r
if not FoundFlag :\r
# just pick the a value to determine whether is unicode string type\r
SkuValueMap = {}\r
- SkuObjList = DscPcdEntry.SkuInfoList.items()\r
+ SkuObjList = list(DscPcdEntry.SkuInfoList.items())\r
DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)\r
if DefaultSku:\r
defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))\r
DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]\r
# Only fix the value while no value provided in DSC file.\r
if not Sku.DefaultValue:\r
- DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue\r
+ DscPcdEntry.SkuInfoList[list(DscPcdEntry.SkuInfoList.keys())[0]].DefaultValue = DecPcdEntry.DefaultValue\r
\r
if DscPcdEntry not in self._DynamicPcdList:\r
self._DynamicPcdList.append(DscPcdEntry)\r
PcdName,PcdGuid = PcdNvStoreDfBuffer[0].TokenCName, PcdNvStoreDfBuffer[0].TokenSpaceGuidCName\r
if (PcdName,PcdGuid) in VpdSkuMap:\r
DefaultSku = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)\r
- VpdSkuMap[(PcdName,PcdGuid)] = {DefaultSku.DefaultValue:[DefaultSku]}\r
+ VpdSkuMap[(PcdName,PcdGuid)] = {DefaultSku.DefaultValue:[SkuObj for SkuObj in PcdNvStoreDfBuffer[0].SkuInfoList.values() ]}\r
\r
# Process VPD map file generated by third party BPDG tool\r
if NeedProcessVpdMapFile:\r
# Delete the DynamicPcdList At the last time enter into this function\r
for Pcd in self._DynamicPcdList:\r
# just pick the a value to determine whether is unicode string type\r
- Sku = Pcd.SkuInfoList.values()[0]\r
+ Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)\r
Sku.VpdOffset = Sku.VpdOffset.strip()\r
\r
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r
for pcd in self._DynamicPcdList:\r
if len(pcd.SkuInfoList) == 1:\r
for (SkuName, SkuId) in allskuset:\r
- if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0:\r
+ if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:\r
continue\r
pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])\r
pcd.SkuInfoList[SkuName].SkuId = SkuId\r
pcd.SkuInfoList[SkuName].SkuIdName = SkuName\r
- self.AllPcdList = self._NonDynamicPcdList + self._DynamicPcdList\r
\r
def FixVpdOffset(self, VpdFile ):\r
FvPath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY)\r
def BuildCommand(self):\r
RetVal = []\r
if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:\r
- RetVal += SplitOption(self.ToolDefinition["MAKE"]["PATH"])\r
+ RetVal += _SplitOption(self.ToolDefinition["MAKE"]["PATH"])\r
if "FLAGS" in self.ToolDefinition["MAKE"]:\r
NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip()\r
if NewOption != '':\r
- RetVal += SplitOption(NewOption)\r
+ RetVal += _SplitOption(NewOption)\r
if "MAKE" in self.EdkIIBuildOption:\r
if "FLAGS" in self.EdkIIBuildOption["MAKE"]:\r
Flags = self.EdkIIBuildOption["MAKE"]["FLAGS"]\r
\r
## Get tool chain definition\r
#\r
- # Get each tool defition for given tool chain from tools_def.txt and platform\r
+ # Get each tool definition for given tool chain from tools_def.txt and platform\r
#\r
@cached_property\r
def ToolDefinition(self):\r
ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)\r
ToolsDef += "\n"\r
\r
- SaveFileOnChange(self.ToolDefinitionFile, ToolsDef)\r
+ SaveFileOnChange(self.ToolDefinitionFile, ToolsDef, False)\r
for DllPath in DllPathList:\r
os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]\r
os.environ["MAKE_FLAGS"] = MakeFlags\r
\r
## Override PCD setting (type, value, ...)\r
#\r
- # @param ToPcd The PCD to be overrided\r
- # @param FromPcd The PCD overrideing from\r
+ # @param ToPcd The PCD to be overridden\r
+ # @param FromPcd The PCD overriding from\r
#\r
def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):\r
#\r
\r
## Apply PCD setting defined platform to a module\r
#\r
- # @param Module The module from which the PCD setting will be overrided\r
+ # @param Module The module from which the PCD setting will be overridden\r
#\r
# @retval PCD_list The list PCDs with settings from platform\r
#\r
Pcd.MaxDatumSize = str(len(Value.split(',')))\r
else:\r
Pcd.MaxDatumSize = str(len(Value) - 1)\r
- return Pcds.values()\r
+ return list(Pcds.values())\r
\r
\r
\r
# Use the highest priority value.\r
#\r
if (len(OverrideList) >= 2):\r
- KeyList = OverrideList.keys()\r
+ KeyList = list(OverrideList.keys())\r
for Index in range(len(KeyList)):\r
NowKey = KeyList[Index]\r
Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")\r
\r
## Append build options in platform to a module\r
#\r
- # @param Module The module to which the build options will be appened\r
+ # @param Module The module to which the build options will be appended\r
#\r
# @retval options The options appended with build options in platform\r
#\r
if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:\r
BuildRuleOrder = Options[Tool][Attr]\r
\r
- AllTools = set(ModuleOptions.keys() + PlatformOptions.keys() +\r
- PlatformModuleOptions.keys() + ModuleTypeOptions.keys() +\r
- self.ToolDefinition.keys())\r
+ AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +\r
+ list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +\r
+ list(self.ToolDefinition.keys()))\r
BuildOptions = defaultdict(lambda: defaultdict(str))\r
for Tool in AllTools:\r
for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:\r
# call super().__init__ then call the worker function with different parameter count\r
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
if not hasattr(self, "_Init"):\r
- super(ModuleAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)\r
self._Init = True\r
\r
self.SourceDir = self.MetaFile.SubDir\r
self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)\r
\r
- self.SourceOverrideDir = None\r
- # use overrided path defined in DSC file\r
- if self.MetaFile.Key in GlobalData.gOverrideDir:\r
- self.SourceOverrideDir = GlobalData.gOverrideDir[self.MetaFile.Key]\r
-\r
self.ToolChain = Toolchain\r
self.BuildTarget = Target\r
self.Arch = Arch\r
self.ReferenceModules = []\r
self.ConstPcd = {}\r
\r
+    ## hash() operator of ModuleAutoGen
+    #
+    # The module file path and arch string will be used to represent
+    # hash value of this object
+    #
+    # @retval int Hash value of the module file path and arch
+    #
+    # NOTE(review): decorated with @cached_class_function, which presumably
+    # caches the first result per instance — confirm in the caching module.
+    @cached_class_function
+    def __hash__(self):
+        return hash((self.MetaFile, self.Arch))
\r
def __repr__(self):\r
return "%s [%s]" % (self.MetaFile, self.Arch)\r
def Guid(self):\r
#\r
# To build same module more than once, the module path with FILE_GUID overridden has\r
- # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the realy path\r
+ # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path\r
# in DSC. The overridden GUID can be retrieved from file name\r
#\r
if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):\r
self.MetaFile.BaseName\r
))\r
\r
- ## Return the directory to store the intermediate object files of the mdoule\r
+ ## Return the directory to store the intermediate object files of the module\r
@cached_property\r
def OutputDir(self):\r
return _MakeDir((self.BuildDir, "OUTPUT"))\r
return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
return ''\r
\r
- ## Return the directory to store auto-gened source files of the mdoule\r
+ ## Return the directory to store auto-gened source files of the module\r
@cached_property\r
def DebugDir(self):\r
return _MakeDir((self.BuildDir, "DEBUG"))\r
RetVal = {}\r
for Type in self.Module.CustomMakefile:\r
MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'\r
- if self.SourceOverrideDir is not None:\r
- File = os.path.join(self.SourceOverrideDir, self.Module.CustomMakefile[Type])\r
- if not os.path.exists(File):\r
- File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r
- else:\r
- File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r
+ File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r
RetVal[MakeType] = File\r
return RetVal\r
\r
\r
## Get the depex string\r
#\r
- # @return : a string contain all depex expresion.\r
+ # @return : a string contain all depex expression.\r
def _GetDepexExpresionString(self):\r
DepexStr = ''\r
DepexList = []\r
for M in [self.Module] + self.DependentLibraryList:\r
Filename = M.MetaFile.Path\r
InfObj = InfSectionParser.InfSectionParser(Filename)\r
- DepexExpresionList = InfObj.GetDepexExpresionList()\r
- for DepexExpresion in DepexExpresionList:\r
- for key in DepexExpresion:\r
+ DepexExpressionList = InfObj.GetDepexExpresionList()\r
+ for DepexExpression in DepexExpressionList:\r
+ for key in DepexExpression:\r
Arch, ModuleType = key\r
- DepexExpr = [x for x in DepexExpresion[key] if not str(x).startswith('#')]\r
+ DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]\r
# the type of build module is USER_DEFINED.\r
# All different DEPEX section tags would be copied into the As Built INF file\r
# and there would be separate DEPEX section tags\r
\r
DepexList = []\r
#\r
- # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion\r
+ # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression\r
#\r
for M in [self.Module] + self.DependentLibraryList:\r
Inherited = False\r
if '.' not in item:\r
NewList.append(item)\r
else:\r
- if item not in self.FixedVoidTypePcds:\r
+ FixedVoidTypePcds = {}\r
+ if item in self.FixedVoidTypePcds:\r
+ FixedVoidTypePcds = self.FixedVoidTypePcds\r
+ elif M in self.PlatformInfo.LibraryAutoGenList:\r
+ Index = self.PlatformInfo.LibraryAutoGenList.index(M)\r
+ FixedVoidTypePcds = self.PlatformInfo.LibraryAutoGenList[Index].FixedVoidTypePcds\r
+ if item not in FixedVoidTypePcds:\r
EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))\r
else:\r
- Value = self.FixedVoidTypePcds[item]\r
+ Value = FixedVoidTypePcds[item]\r
if len(Value.split(',')) != 16:\r
EdkLogger.error("build", FORMAT_INVALID,\r
"{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))\r
# EDK II modules must not reference header files outside of the packages they depend on or\r
# within the module's directory tree. Report error if violation.\r
#\r
- for Path in IncPathList:\r
- if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):\r
- ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)\r
- EdkLogger.error("build",\r
- PARAMETER_INVALID,\r
- ExtraData=ErrMsg,\r
- File=str(self.MetaFile))\r
+ if GlobalData.gDisableIncludePathCheck == False:\r
+ for Path in IncPathList:\r
+ if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):\r
+ ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)\r
+ EdkLogger.error("build",\r
+ PARAMETER_INVALID,\r
+ ExtraData=ErrMsg,\r
+ File=str(self.MetaFile))\r
RetVal += IncPathList\r
return RetVal\r
\r
self.BuildOption\r
for SingleFile in FileList:\r
if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:\r
- key = SingleFile.Path.split(SingleFile.Ext)[0]\r
+ key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]\r
if key in Order_Dict:\r
Order_Dict[key].append(SingleFile.Ext)\r
else:\r
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)\r
RetVal[AutoFile] = str(StringH)\r
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
- if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":\r
+ if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":\r
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)\r
RetVal[AutoFile] = UniStringBinBuffer.getvalue()\r
AutoFile.IsBinary = True\r
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)\r
RetVal[AutoFile] = str(StringIdf)\r
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
- if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":\r
+ if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":\r
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)\r
RetVal[AutoFile] = IdfGenBinBuffer.getvalue()\r
AutoFile.IsBinary = True\r
IdfGenBinBuffer.close()\r
return RetVal\r
\r
- ## Return the list of library modules explicitly or implicityly used by this module\r
+ ## Return the list of library modules explicitly or implicitly used by this module\r
@cached_property\r
def DependentLibraryList(self):\r
# only merge library classes and PCD for non-library module\r
RetVal.append(PackageDir)\r
IncludesList = Package.Includes\r
if Package._PrivateIncludes:\r
- if not self.MetaFile.Path.startswith(PackageDir):\r
+ if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):\r
IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))\r
for Inc in IncludesList:\r
if Inc not in RetVal:\r
Guid = gEfiVarStoreGuidPattern.search(Content, Pos)\r
if not Guid:\r
break\r
- NameArray = ConvertStringToByteArray('L"' + Name.group(1) + '"')\r
+ NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')\r
NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))\r
Pos = Content.find('efivarstore', Name.end())\r
if not NameGuids:\r
Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)\r
if not Value:\r
continue\r
- Name = ConvertStringToByteArray(SkuInfo.VariableName)\r
+ Name = _ConvertStringToByteArray(SkuInfo.VariableName)\r
Guid = GuidStructureStringToGuidString(Value)\r
if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:\r
HiiExPcds.append(Pcd)\r
return None\r
MapFileName = os.path.join(self.OutputDir, self.Name + ".map")\r
EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")\r
- VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrUniBaseName.values())\r
+ VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))\r
if not VfrUniOffsetList:\r
return None\r
\r
EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)\r
\r
# Use a instance of BytesIO to cache data\r
- fStringIO = BytesIO('')\r
+ fStringIO = BytesIO()\r
\r
for Item in VfrUniOffsetList:\r
if (Item[0].find("Strings") != -1):\r
# GUID + Offset\r
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r
#\r
- UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]\r
- UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]\r
- fStringIO.write(''.join(UniGuid))\r
+ UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'\r
+ fStringIO.write(UniGuid)\r
UniValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (UniValue)\r
else:\r
# GUID + Offset\r
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r
#\r
- VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]\r
- VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]\r
- fStringIO.write(''.join(VfrGuid))\r
+ VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'\r
+ fStringIO.write(VfrGuid)\r
VfrValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (VfrValue)\r
#\r
fInputfile.close ()\r
return OutputName\r
\r
+    ## Return the set of output file names produced by this module's build
+    #
+    # Collects: targets from CodaTargetList with the OutputDir/DebugDir path
+    # prefixes stripped, the generated depex file (when DepexGenerated is
+    # set), the offset binary produced by _GenOffsetBin(), and any .pdb
+    # debug-symbol files found under the output directory.
+    #
+    @cached_property
+    def OutputFile(self):
+        retVal = set()
+        # Normalize separators so the prefix stripping below works for both
+        # Windows and POSIX style paths
+        OutputDir = self.OutputDir.replace('\\', '/').strip('/')
+        DebugDir = self.DebugDir.replace('\\', '/').strip('/')
+        for Item in self.CodaTargetList:
+            # Strip the build directory prefixes to get a relative file name
+            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
+            retVal.add(File)
+        if self.DepexGenerated:
+            retVal.add(self.Name + '.depex')
+
+        Bin = self._GenOffsetBin()
+        if Bin:
+            retVal.add(Bin)
+
+        # Debug symbol files are part of the build output as well
+        for Root, Dirs, Files in os.walk(OutputDir):
+            for File in Files:
+                if File.lower().endswith('.pdb'):
+                    retVal.add(File)
+
+        return retVal
+
+\r
## Create AsBuilt INF file the module\r
#\r
- def CreateAsBuiltInf(self, IsOnlyCopy = False):\r
- self.OutputFile = set()\r
- if IsOnlyCopy and GlobalData.gBinCacheDest:\r
- self.CopyModuleToCache()\r
- return\r
+ def CreateAsBuiltInf(self):\r
\r
if self.IsAsBuiltInfCreated:\r
return\r
\r
- # Skip the following code for libraries\r
+ # Skip INF file generation for libraries\r
if self.IsLibrary:\r
return\r
\r
DebugDir = self.DebugDir.replace('\\', '/').strip('/')\r
for Item in self.CodaTargetList:\r
File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')\r
- self.OutputFile.add(File)\r
if os.path.isabs(File):\r
File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')\r
if Item.Target.Ext.lower() == '.aml':\r
if os.path.exists(DepexFile):\r
self.DepexGenerated = True\r
if self.DepexGenerated:\r
- self.OutputFile.add(self.Name + '.depex')\r
if self.ModuleType in [SUP_MODULE_PEIM]:\r
AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')\r
elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:\r
Bin = self._GenOffsetBin()\r
if Bin:\r
AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)\r
- self.OutputFile.add(Bin)\r
\r
for Root, Dirs, Files in os.walk(OutputDir):\r
for File in Files:\r
if File.lower().endswith('.pdb'):\r
AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)\r
- self.OutputFile.add(File)\r
HeaderComments = self.Module.HeaderComments\r
StartPos = 0\r
for Index in range(len(HeaderComments)):\r
Padding = '0x00, '\r
if Unicode:\r
Padding = Padding * 2\r
- ArraySize = ArraySize / 2\r
+ ArraySize = ArraySize // 2\r
if ArraySize < (len(PcdValue) + 1):\r
if Pcd.MaxSizeUserSet:\r
EdkLogger.error("build", AUTOGEN_ERROR,\r
AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr\r
\r
# Generated depex expression section in comments.\r
- DepexExpresion = self._GetDepexExpresionString()\r
- AsBuiltInfDict['depexsection_item'] = DepexExpresion if DepexExpresion else ''\r
+ DepexExpression = self._GetDepexExpresionString()\r
+ AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''\r
\r
AsBuiltInf = TemplateString()\r
AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))\r
SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)\r
\r
self.IsAsBuiltInfCreated = True\r
- if GlobalData.gBinCacheDest:\r
- self.CopyModuleToCache()\r
\r
def CopyModuleToCache(self):\r
- FileDir = path.join(GlobalData.gBinCacheDest, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
CreateDirectory (FileDir)\r
HashFile = path.join(self.BuildDir, self.Name + '.hash')\r
- ModuleFile = path.join(self.OutputDir, self.Name + '.inf')\r
if os.path.exists(HashFile):\r
- shutil.copy2(HashFile, FileDir)\r
+ CopyFileOnChange(HashFile, FileDir)\r
+ ModuleFile = path.join(self.OutputDir, self.Name + '.inf')\r
if os.path.exists(ModuleFile):\r
- shutil.copy2(ModuleFile, FileDir)\r
+ CopyFileOnChange(ModuleFile, FileDir)\r
+\r
if not self.OutputFile:\r
- Ma = self.BuildDatabase[PathClass(ModuleFile), self.Arch, self.BuildTarget, self.ToolChain]\r
+ Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
self.OutputFile = Ma.Binaries\r
- if self.OutputFile:\r
- for File in self.OutputFile:\r
- File = str(File)\r
- if not os.path.isabs(File):\r
- File = os.path.join(self.OutputDir, File)\r
- if os.path.exists(File):\r
- shutil.copy2(File, FileDir)\r
+\r
+ for File in self.OutputFile:\r
+ File = str(File)\r
+ if not os.path.isabs(File):\r
+ File = os.path.join(self.OutputDir, File)\r
+ if os.path.exists(File):\r
+ sub_dir = os.path.relpath(File, self.OutputDir)\r
+ destination_file = os.path.join(FileDir, sub_dir)\r
+ destination_dir = os.path.dirname(destination_file)\r
+ CreateDirectory(destination_dir)\r
+ CopyFileOnChange(File, destination_dir)\r
\r
def AttemptModuleCacheCopy(self):\r
+ # If library or Module is binary do not skip by hash\r
if self.IsBinaryModule:\r
return False\r
- FileDir = path.join(GlobalData.gBinCacheSource, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+ # .inc is contains binary information so do not skip by hash as well\r
+ for f_ext in self.SourceFileList:\r
+ if '.inc' in str(f_ext):\r
+ return False\r
+ FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
HashFile = path.join(FileDir, self.Name + '.hash')\r
if os.path.exists(HashFile):\r
f = open(HashFile, 'r')\r
CacheHash = f.read()\r
f.close()\r
+ self.GenModuleHash()\r
if GlobalData.gModuleHash[self.Arch][self.Name]:\r
if CacheHash == GlobalData.gModuleHash[self.Arch][self.Name]:\r
for root, dir, files in os.walk(FileDir):\r
for f in files:\r
if self.Name + '.hash' in f:\r
- shutil.copy2(HashFile, self.BuildDir)\r
+ CopyFileOnChange(HashFile, self.BuildDir)\r
else:\r
File = path.join(root, f)\r
- shutil.copy2(File, self.OutputDir)\r
+ sub_dir = os.path.relpath(File, FileDir)\r
+ destination_file = os.path.join(self.OutputDir, sub_dir)\r
+ destination_dir = os.path.dirname(destination_file)\r
+ CreateDirectory(destination_dir)\r
+ CopyFileOnChange(File, destination_dir)\r
if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
return True\r
for LibraryAutoGen in self.LibraryAutoGenList:\r
LibraryAutoGen.CreateMakeFile()\r
\r
- if self.CanSkip():\r
+ # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping\r
+ if not GlobalData.gUseHashCache and self.CanSkip():\r
return\r
\r
if len(self.CustomMakefile) == 0:\r
for LibraryAutoGen in self.LibraryAutoGenList:\r
LibraryAutoGen.CreateCodeFile()\r
\r
- if self.CanSkip():\r
+ # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping\r
+ if not GlobalData.gUseHashCache and self.CanSkip():\r
return\r
\r
AutoGenList = []\r
return RetVal\r
\r
def GenModuleHash(self):\r
+ # Initialize a dictionary for each arch type\r
if self.Arch not in GlobalData.gModuleHash:\r
GlobalData.gModuleHash[self.Arch] = {}\r
+\r
+ # Early exit if module or library has been hashed and is in memory\r
+ if self.Name in GlobalData.gModuleHash[self.Arch]:\r
+ return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
+\r
+        # Initialize hash object
m = hashlib.md5()\r
+\r
# Add Platform level hash\r
- m.update(GlobalData.gPlatformHash)\r
+ m.update(GlobalData.gPlatformHash.encode('utf-8'))\r
+\r
# Add Package level hash\r
if self.DependentPackageList:\r
for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
- if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:\r
- m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName])\r
+ if Pkg.PackageName in GlobalData.gPackageHash:\r
+ m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))\r
\r
# Add Library hash\r
if self.LibraryAutoGenList:\r
for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r
Lib.GenModuleHash()\r
- m.update(GlobalData.gModuleHash[self.Arch][Lib.Name])\r
+ m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
\r
# Add Module self\r
- f = open(str(self.MetaFile), 'r')\r
+ f = open(str(self.MetaFile), 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
+\r
# Add Module's source files\r
if self.SourceFileList:\r
for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r
- f = open(str(File), 'r')\r
+ f = open(str(File), 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
\r
- ModuleHashFile = path.join(self.BuildDir, self.Name + ".hash")\r
- if self.Name not in GlobalData.gModuleHash[self.Arch]:\r
- GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()\r
- if GlobalData.gBinCacheSource:\r
- if self.AttemptModuleCacheCopy():\r
- return False\r
- return SaveFileOnChange(ModuleHashFile, m.hexdigest(), True)\r
+ GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()\r
+\r
+ return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
\r
## Decide whether we can skip the ModuleAutoGen process\r
def CanSkipbyHash(self):\r
- if GlobalData.gUseHashCache:\r
- return not self.GenModuleHash()\r
- return False\r
+ # Hashing feature is off\r
+ if not GlobalData.gUseHashCache:\r
+ return False\r
+\r
+ # Initialize a dictionary for each arch type\r
+ if self.Arch not in GlobalData.gBuildHashSkipTracking:\r
+ GlobalData.gBuildHashSkipTracking[self.Arch] = dict()\r
+\r
+ # If library or Module is binary do not skip by hash\r
+ if self.IsBinaryModule:\r
+ return False\r
+\r
+        # .inc files contain binary information, so do not skip by hash either
+ for f_ext in self.SourceFileList:\r
+ if '.inc' in str(f_ext):\r
+ return False\r
+\r
+ # Use Cache, if exists and if Module has a copy in cache\r
+ if GlobalData.gBinCacheSource and self.AttemptModuleCacheCopy():\r
+ return True\r
+\r
+ # Early exit for libraries that haven't yet finished building\r
+ HashFile = path.join(self.BuildDir, self.Name + ".hash")\r
+ if self.IsLibrary and not os.path.exists(HashFile):\r
+ return False\r
+\r
+ # Return a Boolean based on if can skip by hash, either from memory or from IO.\r
+ if self.Name not in GlobalData.gBuildHashSkipTracking[self.Arch]:\r
+ # If hashes are the same, SaveFileOnChange() will return False.\r
+ GlobalData.gBuildHashSkipTracking[self.Arch][self.Name] = not SaveFileOnChange(HashFile, self.GenModuleHash(), True)\r
+ return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]\r
+ else:\r
+ return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]\r
\r
## Decide whether we can skip the ModuleAutoGen process\r
# If any source file is newer than the module than we cannot skip\r