## @file\r
# Generate AutoGen.h, AutoGen.c and *.depex files\r
#\r
-# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>\r
+# Copyright (c) 2007 - 2019, Intel Corporation. All rights reserved.<BR>\r
# Copyright (c) 2018, Hewlett Packard Enterprise Development, L.P.<BR>\r
+# Copyright (c) 2019, American Megatrends, Inc. All rights reserved.<BR>\r
#\r
# This program and the accompanying materials\r
# are licensed and made available under the terms and conditions of the BSD License\r
\r
from .StrGather import *\r
from .BuildEngine import BuildRule\r
-\r
+import shutil\r
from Common.LongFilePathSupport import CopyLongFilePath\r
from Common.BuildToolError import *\r
from Common.DataType import *\r
from collections import OrderedDict\r
from collections import defaultdict\r
from Workspace.WorkspaceCommon import OrderedListDict\r
+from Common.ToolDefClassObject import gDefaultToolsDefFile\r
\r
from Common.caching import cached_property, cached_class_function\r
\r
## Build rule configuration file\r
gDefaultBuildRuleFile = 'build_rule.txt'\r
\r
-## Tools definition configuration file\r
-gDefaultToolsDefFile = 'tools_def.txt'\r
-\r
## Build rule default version\r
AutoGenReqBuildRuleVerNum = "0.1"\r
\r
## @AsBuilt${BEGIN}\r
## ${flags_item}${END}\r
""")\r
+## Split command line option string to list\r
+#\r
+# subprocess.Popen needs the args to be a sequence; otherwise there is a\r
+# problem launching the command on non-Windows platforms.\r
+#\r
+def _SplitOption(OptionString):\r
+    """Split a command-line option string into a list of option tokens.\r
+\r
+    subprocess.Popen needs its args as a sequence on non-Windows hosts, so\r
+    the flat string is tokenized here.  A new token starts at a '/' or '-'\r
+    that immediately follows whitespace; text inside single or double\r
+    quotes is never split.  Returns the list of token substrings.\r
+    """\r
+    OptionList = []\r
+    LastChar = " "        # seeded with a space so an option at index 0 is recognized\r
+    OptionStart = 0\r
+    QuotationMark = ""    # quote char currently open, or "" when outside quotes\r
+    for Index in range(0, len(OptionString)):\r
+        CurrentChar = OptionString[Index]\r
+        if CurrentChar in ['"', "'"]:\r
+            # Toggle quote state; the closing quote must match the opening one.\r
+            if QuotationMark == CurrentChar:\r
+                QuotationMark = ""\r
+            elif QuotationMark == "":\r
+                QuotationMark = CurrentChar\r
+            continue\r
+        elif QuotationMark:\r
+            # Inside a quoted region: never start a new token here.\r
+            continue\r
+\r
+        if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]:\r
+            if Index > OptionStart:\r
+                # Index - 1 drops the single whitespace char (LastChar) that\r
+                # separates this option from the previous token.\r
+                OptionList.append(OptionString[OptionStart:Index - 1])\r
+            OptionStart = Index\r
+        LastChar = CurrentChar\r
+    OptionList.append(OptionString[OptionStart:])\r
+    return OptionList\r
+\r
+#\r
+# Convert string to C format array\r
+#\r
+def _ConvertStringToByteArray(Value):\r
+    """Normalize a value string into a C-style byte-array literal.\r
+\r
+    Accepts three forms: a '{..}' element list, a '"..."' ASCII string, or\r
+    an 'L"..."' unicode string.  Returns the canonical '{n,n,...}' text,\r
+    or None when the input is empty or malformed.\r
+    """\r
+    Value = Value.strip()\r
+    if not Value:\r
+        return None\r
+    if Value[0] == '{':\r
+        if not Value.endswith('}'):\r
+            return None\r
+        # Re-emit the element list with each field parsed as a C integer\r
+        # (int(.., 0) accepts decimal, 0x.., 0o.., 0b.. forms).\r
+        Value = Value.replace(' ', '').replace('{', '').replace('}', '')\r
+        ValFields = Value.split(',')\r
+        try:\r
+            for Index in range(len(ValFields)):\r
+                ValFields[Index] = str(int(ValFields[Index], 0))\r
+        except ValueError:\r
+            return None\r
+        Value = '{' + ','.join(ValFields) + '}'\r
+        return Value\r
+\r
+    Unicode = False\r
+    if Value.startswith('L"'):\r
+        if not Value.endswith('"'):\r
+            return None\r
+        Value = Value[1:]    # strip the L prefix, keep the quotes for eval below\r
+        Unicode = True\r
+    elif not Value.startswith('"') or not Value.endswith('"'):\r
+        return None\r
+\r
+    # NOTE(review): eval() is used here only to translate escape sequences in\r
+    # the quoted string, but it would execute arbitrary expressions if Value\r
+    # ever came from untrusted input -- confirm callers only pass strings\r
+    # sourced from build metadata files.\r
+    Value = eval(Value) # translate escape character\r
+    NewValue = '{'\r
+    for Index in range(0, len(Value)):\r
+        if Unicode:\r
+            # 16-bit code units for L"..." strings\r
+            NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','\r
+        else:\r
+            # single bytes for "..." strings\r
+            NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','\r
+    Value = NewValue + '0}'    # append the NUL terminator element\r
+    return Value\r
\r
## Base class for AutoGen\r
#\r
RetVal = cls.__ObjectCache[Key] = super(AutoGen, cls).__new__(cls)\r
return RetVal\r
\r
- def __init__ (self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
- super(AutoGen, self).__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
\r
## hash() operator\r
#\r
# call super().__init__ then call the worker function with different parameter count\r
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
if not hasattr(self, "_Init"):\r
- super(WorkspaceAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
self._Init = True\r
\r
\r
# generate the SourcePcdDict and BinaryPcdDict\r
PGen = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)\r
- for BuildData in PGen.BuildDatabase._CACHE_.values():\r
+ for BuildData in list(PGen.BuildDatabase._CACHE_.values()):\r
if BuildData.Arch != Arch:\r
continue\r
if BuildData.MetaFile.Ext == '.inf':\r
if NewPcd2 not in GlobalData.MixedPcd[item]:\r
GlobalData.MixedPcd[item].append(NewPcd2)\r
\r
- for BuildData in PGen.BuildDatabase._CACHE_.values():\r
+ for BuildData in list(PGen.BuildDatabase._CACHE_.values()):\r
if BuildData.Arch != Arch:\r
continue\r
for key in BuildData.Pcds:\r
'build',\r
PARSER_ERROR,\r
"PCD (%s.%s) used in FDF is not declared in DEC files." % (Guid, Name),\r
- File = self.FdfProfile.PcdFileLineDict[Name, Guid][0],\r
- Line = self.FdfProfile.PcdFileLineDict[Name, Guid][1]\r
+ File = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][0],\r
+ Line = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][1]\r
)\r
else:\r
# Check whether Dynamic or DynamicEx PCD used in FDF file. If used, build break and give a error message.\r
'build',\r
PARSER_ERROR,\r
"Using Dynamic or DynamicEx type of PCD [%s.%s] in FDF file is not allowed." % (Guid, Name),\r
- File = self.FdfProfile.PcdFileLineDict[Name, Guid][0],\r
- Line = self.FdfProfile.PcdFileLineDict[Name, Guid][1]\r
+ File = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][0],\r
+ Line = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][1]\r
)\r
\r
Pa = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)\r
#\r
content = 'gCommandLineDefines: '\r
content += str(GlobalData.gCommandLineDefines)\r
- content += os.linesep\r
+ content += TAB_LINE_BREAK\r
content += 'BuildOptionPcd: '\r
content += str(GlobalData.BuildOptionPcd)\r
- content += os.linesep\r
+ content += TAB_LINE_BREAK\r
content += 'Active Platform: '\r
content += str(self.Platform)\r
- content += os.linesep\r
+ content += TAB_LINE_BREAK\r
if self.FdfFile:\r
content += 'Flash Image Definition: '\r
content += str(self.FdfFile)\r
- content += os.linesep\r
+ content += TAB_LINE_BREAK\r
SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)\r
\r
#\r
if Pa.PcdTokenNumber:\r
if Pa.DynamicPcdList:\r
for Pcd in Pa.DynamicPcdList:\r
- PcdTokenNumber += os.linesep\r
+ PcdTokenNumber += TAB_LINE_BREAK\r
PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))\r
PcdTokenNumber += ' : '\r
PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName])\r
for files in AllWorkSpaceMetaFiles:\r
if files.endswith('.dec'):\r
continue\r
- f = open(files, 'r')\r
+ f = open(files, 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
- SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), True)\r
+ SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), False)\r
GlobalData.gPlatformHash = m.hexdigest()\r
\r
#\r
HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')\r
m = hashlib.md5()\r
# Get .dec file's hash value\r
- f = open(Pkg.MetaFile.Path, 'r')\r
+ f = open(Pkg.MetaFile.Path, 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
for Root, Dirs, Files in os.walk(str(inc)):\r
for File in sorted(Files):\r
File_Path = os.path.join(Root, File)\r
- f = open(File_Path, 'r')\r
+ f = open(File_Path, 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
- SaveFileOnChange(HashFile, m.hexdigest(), True)\r
+ SaveFileOnChange(HashFile, m.hexdigest(), False)\r
GlobalData.gPackageHash[Pkg.Arch][Pkg.PackageName] = m.hexdigest()\r
\r
def _GetMetaFiles(self, Target, Toolchain, Arch):\r
def _CheckAllPcdsTokenValueConflict(self):\r
for Pa in self.AutoGenObjectList:\r
for Package in Pa.PackageList:\r
- PcdList = Package.Pcds.values()\r
+ PcdList = list(Package.Pcds.values())\r
PcdList.sort(key=lambda x: int(x.TokenValue, 0))\r
Count = 0\r
while (Count < len(PcdList) - 1) :\r
Count += SameTokenValuePcdListCount\r
Count += 1\r
\r
- PcdList = Package.Pcds.values()\r
+ PcdList = list(Package.Pcds.values())\r
PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))\r
Count = 0\r
while (Count < len(PcdList) - 1) :\r
def GenFdsCommand(self):\r
return (GenMake.TopLevelMakefile(self)._TEMPLATE_.Replace(GenMake.TopLevelMakefile(self)._TemplateDict)).strip()\r
\r
+    @property\r
+    def GenFdsCommandDict(self):\r
+        """Assemble the option dictionary handed to the GenFds tool.\r
+\r
+        Collects verbosity/debug/quiet flags from the logger level, the\r
+        multi-thread and ignore-source switches, PCD overrides, macro\r
+        definitions, and the FDF/target/toolchain/output settings.\r
+        """\r
+        FdsCommandDict = {}\r
+        LogLevel = EdkLogger.GetLevel()\r
+        if LogLevel == EdkLogger.VERBOSE:\r
+            FdsCommandDict["verbose"] = True\r
+        elif LogLevel <= EdkLogger.DEBUG_9:\r
+            FdsCommandDict["debug"] = LogLevel - 1\r
+        elif LogLevel == EdkLogger.QUIET:\r
+            FdsCommandDict["quiet"] = True\r
+\r
+        if GlobalData.gEnableGenfdsMultiThread:\r
+            FdsCommandDict["GenfdsMultiThread"] = True\r
+        if GlobalData.gIgnoreSource:\r
+            FdsCommandDict["IgnoreSources"] = True\r
+\r
+        FdsCommandDict["OptionPcd"] = []\r
+        for pcd in GlobalData.BuildOptionPcd:\r
+            # pcd[2] is the structure field name; include it only when set.\r
+            if pcd[2]:\r
+                pcdname = '.'.join(pcd[0:3])\r
+            else:\r
+                pcdname = '.'.join(pcd[0:2])\r
+            if pcd[3].startswith('{'):\r
+                # Byte-array values are tagged with 'H' and quoted.\r
+                FdsCommandDict["OptionPcd"].append(pcdname + '=' + 'H' + '"' + pcd[3] + '"')\r
+            else:\r
+                FdsCommandDict["OptionPcd"].append(pcdname + '=' + pcd[3])\r
+\r
+        MacroList = []\r
+        # macros passed to GenFds\r
+        MacroDict = {}\r
+        MacroDict.update(GlobalData.gGlobalDefines)\r
+        MacroDict.update(GlobalData.gCommandLineDefines)\r
+        for MacroName in MacroDict:\r
+            if MacroDict[MacroName] != "":\r
+                # Double the backslashes so path values survive re-parsing.\r
+                MacroList.append('"%s=%s"' % (MacroName, MacroDict[MacroName].replace('\\', '\\\\')))\r
+            else:\r
+                MacroList.append('"%s"' % MacroName)\r
+        FdsCommandDict["macro"] = MacroList\r
+\r
+        FdsCommandDict["fdf_file"] = [self.FdfFile]\r
+        FdsCommandDict["build_target"] = self.BuildTarget\r
+        FdsCommandDict["toolchain_tag"] = self.ToolChain\r
+        FdsCommandDict["active_platform"] = str(self)\r
+\r
+        FdsCommandDict["conf_directory"] = GlobalData.gConfDirectory\r
+        FdsCommandDict["build_architecture_list"] = ','.join(self.ArchList)\r
+        FdsCommandDict["platform_build_directory"] = self.BuildDir\r
+\r
+        # FD / FV / Capsule images to generate\r
+        FdsCommandDict["fd"] = self.FdTargetList\r
+        FdsCommandDict["fv"] = self.FvTargetList\r
+        FdsCommandDict["cap"] = self.CapTargetList\r
+        return FdsCommandDict\r
+\r
## Create makefile for the platform and modules in it\r
#\r
# @param CreateDepsMakeFile Flag indicating if the makefile for\r
# call super().__init__ then call the worker function with different parameter count\r
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
if not hasattr(self, "_Init"):\r
- super(PlatformAutoGen, self).__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)\r
self._Init = True\r
#\r
self.BuildTarget = Target\r
self.Arch = Arch\r
self.SourceDir = PlatformFile.SubDir\r
- self.SourceOverrideDir = None\r
self.FdTargetList = self.Workspace.FdTargetList\r
self.FvTargetList = self.Workspace.FvTargetList\r
- self.AllPcdList = []\r
# get the original module/package/platform objects\r
self.BuildDatabase = Workspace.BuildDatabase\r
self.DscBuildDataObj = Workspace.Platform\r
#\r
@cached_class_function\r
def CreateCodeFile(self, CreateModuleCodeFile=False):\r
- # only module has code to be greated, so do nothing if CreateModuleCodeFile is False\r
+ # only module has code to be created, so do nothing if CreateModuleCodeFile is False\r
if not CreateModuleCodeFile:\r
return\r
\r
\r
self.IsMakeFileCreated = True\r
\r
+    @property\r
+    def AllPcdList(self):\r
+        # Combined list of dynamic and non-dynamic PCDs, computed on demand\r
+        # so it always reflects the current contents of both lists.\r
+        return self.DynamicPcdList + self.NonDynamicPcdList\r
## Deal with Shared FixedAtBuild Pcds\r
#\r
def CollectFixedAtBuildPcds(self):\r
VpdRegionBase = FdRegion.Offset\r
break\r
\r
- VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(), self.DscBuildDataObj._GetSkuIds())\r
+ VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(), self.DscBuildDataObj.SkuIds)\r
VariableInfo.SetVpdRegionMaxSize(VpdRegionSize)\r
VariableInfo.SetVpdRegionOffset(VpdRegionBase)\r
Index = 0\r
if SkuId is None or SkuId == '':\r
continue\r
if len(Sku.VariableName) > 0:\r
+ if Sku.VariableAttribute and 'NV' not in Sku.VariableAttribute:\r
+ continue\r
VariableGuidStructure = Sku.VariableGuidValue\r
VariableGuid = GuidStructureStringToGuidString(VariableGuidStructure)\r
for StorageName in Sku.DefaultStoreDict:\r
- VariableInfo.append_variable(var_info(Index, pcdname, StorageName, SkuName, StringToArray(Sku.VariableName), VariableGuid, Sku.VariableOffset, Sku.VariableAttribute, Sku.HiiDefaultValue, Sku.DefaultStoreDict[StorageName], Pcd.DatumType))\r
+ VariableInfo.append_variable(var_info(Index, pcdname, StorageName, SkuName, StringToArray(Sku.VariableName), VariableGuid, Sku.VariableOffset, Sku.VariableAttribute, Sku.HiiDefaultValue, Sku.DefaultStoreDict[StorageName] if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES else StringToArray(Sku.DefaultStoreDict[StorageName]), Pcd.DatumType, Pcd.CustomAttribute['DscPosition'], Pcd.CustomAttribute.get('IsStru',False)))\r
Index += 1\r
return VariableInfo\r
\r
if os.path.exists(VpdMapFilePath):\r
OrgVpdFile.Read(VpdMapFilePath)\r
PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])\r
- NvStoreOffset = PcdItems.values()[0].strip() if PcdItems else '0'\r
+ NvStoreOffset = list(PcdItems.values())[0].strip() if PcdItems else '0'\r
else:\r
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)\r
\r
if (self.Workspace.ArchList[-1] == self.Arch):\r
for Pcd in self._DynamicPcdList:\r
# just pick the a value to determine whether is unicode string type\r
- Sku = Pcd.SkuInfoList.values()[0]\r
+ Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)\r
Sku.VpdOffset = Sku.VpdOffset.strip()\r
\r
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r
self.VariableInfo = self.CollectVariables(self._DynamicPcdList)\r
vardump = self.VariableInfo.dump()\r
if vardump:\r
+ #\r
+ #According to PCD_DATABASE_INIT in edk2\MdeModulePkg\Include\Guid\PcdDataBaseSignatureGuid.h,\r
+ #the max size for string PCD should not exceed USHRT_MAX 65535(0xffff).\r
+ #typedef UINT16 SIZE_INFO;\r
+ #//SIZE_INFO SizeTable[];\r
+ if len(vardump.split(",")) > 0xffff:\r
+ EdkLogger.error("build", RESOURCE_OVERFLOW, 'The current length of PCD %s value is %d, it exceeds to the max size of String PCD.' %(".".join([PcdNvStoreDfBuffer.TokenSpaceGuidCName,PcdNvStoreDfBuffer.TokenCName]) ,len(vardump.split(","))))\r
PcdNvStoreDfBuffer.DefaultValue = vardump\r
for skuname in PcdNvStoreDfBuffer.SkuInfoList:\r
PcdNvStoreDfBuffer.SkuInfoList[skuname].DefaultValue = vardump\r
PcdValue = Sku.DefaultValue\r
if PcdValue == "":\r
PcdValue = Pcd.DefaultValue\r
- if Sku.VpdOffset != '*':\r
+ if Sku.VpdOffset != TAB_STAR:\r
if PcdValue.startswith("{"):\r
Alignment = 8\r
elif PcdValue.startswith("L"):\r
VpdFile.Add(Pcd, SkuName, Sku.VpdOffset)\r
SkuValueMap[PcdValue].append(Sku)\r
# if the offset of a VPD is *, then it need to be fixed up by third party tool.\r
- if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":\r
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == TAB_STAR:\r
NeedProcessVpdMapFile = True\r
if self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == '':\r
EdkLogger.error("Build", FILE_NOT_FOUND, \\r
if not FoundFlag :\r
# just pick the a value to determine whether is unicode string type\r
SkuValueMap = {}\r
- SkuObjList = DscPcdEntry.SkuInfoList.items()\r
+ SkuObjList = list(DscPcdEntry.SkuInfoList.items())\r
DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)\r
if DefaultSku:\r
defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))\r
DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]\r
# Only fix the value while no value provided in DSC file.\r
if not Sku.DefaultValue:\r
- DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue\r
+ DscPcdEntry.SkuInfoList[list(DscPcdEntry.SkuInfoList.keys())[0]].DefaultValue = DecPcdEntry.DefaultValue\r
\r
if DscPcdEntry not in self._DynamicPcdList:\r
self._DynamicPcdList.append(DscPcdEntry)\r
PcdValue = Sku.DefaultValue\r
if PcdValue == "":\r
PcdValue = DscPcdEntry.DefaultValue\r
- if Sku.VpdOffset != '*':\r
+ if Sku.VpdOffset != TAB_STAR:\r
if PcdValue.startswith("{"):\r
Alignment = 8\r
elif PcdValue.startswith("L"):\r
SkuValueMap[PcdValue] = []\r
VpdFile.Add(DscPcdEntry, SkuName, Sku.VpdOffset)\r
SkuValueMap[PcdValue].append(Sku)\r
- if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":\r
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == TAB_STAR:\r
NeedProcessVpdMapFile = True\r
if DscPcdEntry.DatumType == TAB_VOID and PcdValue.startswith("L"):\r
UnicodePcdArray.add(DscPcdEntry)\r
self.FixVpdOffset(VpdFile)\r
\r
self.FixVpdOffset(self.UpdateNVStoreMaxSize(VpdFile))\r
+ PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]\r
+ if PcdNvStoreDfBuffer:\r
+ PcdName,PcdGuid = PcdNvStoreDfBuffer[0].TokenCName, PcdNvStoreDfBuffer[0].TokenSpaceGuidCName\r
+ if (PcdName,PcdGuid) in VpdSkuMap:\r
+ DefaultSku = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)\r
+ VpdSkuMap[(PcdName,PcdGuid)] = {DefaultSku.DefaultValue:[SkuObj for SkuObj in PcdNvStoreDfBuffer[0].SkuInfoList.values() ]}\r
\r
# Process VPD map file generated by third party BPDG tool\r
if NeedProcessVpdMapFile:\r
if os.path.exists(VpdMapFilePath):\r
VpdFile.Read(VpdMapFilePath)\r
\r
- # Fixup "*" offset\r
+ # Fixup TAB_STAR offset\r
for pcd in VpdSkuMap:\r
vpdinfo = VpdFile.GetVpdInfo(pcd)\r
if vpdinfo is None:\r
# Delete the DynamicPcdList At the last time enter into this function\r
for Pcd in self._DynamicPcdList:\r
# just pick the a value to determine whether is unicode string type\r
- Sku = Pcd.SkuInfoList.values()[0]\r
+ Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)\r
Sku.VpdOffset = Sku.VpdOffset.strip()\r
\r
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r
for pcd in self._DynamicPcdList:\r
if len(pcd.SkuInfoList) == 1:\r
for (SkuName, SkuId) in allskuset:\r
- if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0:\r
+ if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:\r
continue\r
pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])\r
pcd.SkuInfoList[SkuName].SkuId = SkuId\r
pcd.SkuInfoList[SkuName].SkuIdName = SkuName\r
- self.AllPcdList = self._NonDynamicPcdList + self._DynamicPcdList\r
\r
def FixVpdOffset(self, VpdFile ):\r
FvPath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY)\r
def BuildCommand(self):\r
RetVal = []\r
if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:\r
- RetVal += SplitOption(self.ToolDefinition["MAKE"]["PATH"])\r
+ RetVal += _SplitOption(self.ToolDefinition["MAKE"]["PATH"])\r
if "FLAGS" in self.ToolDefinition["MAKE"]:\r
NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip()\r
if NewOption != '':\r
- RetVal += SplitOption(NewOption)\r
+ RetVal += _SplitOption(NewOption)\r
if "MAKE" in self.EdkIIBuildOption:\r
if "FLAGS" in self.EdkIIBuildOption["MAKE"]:\r
Flags = self.EdkIIBuildOption["MAKE"]["FLAGS"]\r
\r
## Get tool chain definition\r
#\r
- # Get each tool defition for given tool chain from tools_def.txt and platform\r
+ # Get each tool definition for given tool chain from tools_def.txt and platform\r
#\r
@cached_property\r
def ToolDefinition(self):\r
ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)\r
ToolsDef += "\n"\r
\r
- SaveFileOnChange(self.ToolDefinitionFile, ToolsDef)\r
+ SaveFileOnChange(self.ToolDefinitionFile, ToolsDef, False)\r
for DllPath in DllPathList:\r
os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]\r
os.environ["MAKE_FLAGS"] = MakeFlags\r
return {(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):Pcd for Pcd in self.NonDynamicPcdList}\r
\r
## Get list of non-dynamic PCDs\r
- @cached_property\r
+ @property\r
def NonDynamicPcdList(self):\r
- self.CollectPlatformDynamicPcds()\r
+ if not self._NonDynamicPcdList:\r
+ self.CollectPlatformDynamicPcds()\r
return self._NonDynamicPcdList\r
\r
## Get list of dynamic PCDs\r
- @cached_property\r
+ @property\r
def DynamicPcdList(self):\r
- self.CollectPlatformDynamicPcds()\r
+ if not self._DynamicPcdList:\r
+ self.CollectPlatformDynamicPcds()\r
return self._DynamicPcdList\r
\r
## Generate Token Number for all PCD\r
\r
## Override PCD setting (type, value, ...)\r
#\r
- # @param ToPcd The PCD to be overrided\r
- # @param FromPcd The PCD overrideing from\r
+ # @param ToPcd The PCD to be overridden\r
+ # @param FromPcd The PCD overriding from\r
#\r
def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):\r
#\r
ToPcd.validateranges = FromPcd.validateranges\r
ToPcd.validlists = FromPcd.validlists\r
ToPcd.expressions = FromPcd.expressions\r
+ ToPcd.CustomAttribute = FromPcd.CustomAttribute\r
\r
if FromPcd is not None and ToPcd.DatumType == TAB_VOID and not ToPcd.MaxDatumSize:\r
EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \\r
\r
## Apply PCD setting defined platform to a module\r
#\r
- # @param Module The module from which the PCD setting will be overrided\r
+ # @param Module The module from which the PCD setting will be overridden\r
#\r
# @retval PCD_list The list PCDs with settings from platform\r
#\r
if Module in self.Platform.Modules:\r
PlatformModule = self.Platform.Modules[str(Module)]\r
for Key in PlatformModule.Pcds:\r
+ if GlobalData.BuildOptionPcd:\r
+ for pcd in GlobalData.BuildOptionPcd:\r
+ (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, _) = pcd\r
+ if (TokenCName, TokenSpaceGuidCName) == Key and FieldName =="":\r
+ PlatformModule.Pcds[Key].DefaultValue = pcdvalue\r
+ PlatformModule.Pcds[Key].PcdValueFromComm = pcdvalue\r
+ break\r
Flag = False\r
if Key in Pcds:\r
ToPcd = Pcds[Key]\r
Pcd.MaxDatumSize = str(len(Value.split(',')))\r
else:\r
Pcd.MaxDatumSize = str(len(Value) - 1)\r
- return Pcds.values()\r
+ return list(Pcds.values())\r
\r
- ## Resolve library names to library modules\r
- #\r
- # (for Edk.x modules)\r
- #\r
- # @param Module The module from which the library names will be resolved\r
- #\r
- # @retval library_list The list of library modules\r
- #\r
- def ResolveLibraryReference(self, Module):\r
- EdkLogger.verbose("")\r
- EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), self.Arch))\r
- LibraryConsumerList = [Module]\r
-\r
- # "CompilerStub" is a must for Edk modules\r
- if Module.Libraries:\r
- Module.Libraries.append("CompilerStub")\r
- LibraryList = []\r
- while len(LibraryConsumerList) > 0:\r
- M = LibraryConsumerList.pop()\r
- for LibraryName in M.Libraries:\r
- Library = self.Platform.LibraryClasses[LibraryName, ':dummy:']\r
- if Library is None:\r
- for Key in self.Platform.LibraryClasses.data:\r
- if LibraryName.upper() == Key.upper():\r
- Library = self.Platform.LibraryClasses[Key, ':dummy:']\r
- break\r
- if Library is None:\r
- EdkLogger.warn("build", "Library [%s] is not found" % LibraryName, File=str(M),\r
- ExtraData="\t%s [%s]" % (str(Module), self.Arch))\r
- continue\r
\r
- if Library not in LibraryList:\r
- LibraryList.append(Library)\r
- LibraryConsumerList.append(Library)\r
- EdkLogger.verbose("\t" + LibraryName + " : " + str(Library) + ' ' + str(type(Library)))\r
- return LibraryList\r
\r
## Calculate the priority value of the build option\r
#\r
def CalculatePriorityValue(self, Key):\r
Target, ToolChain, Arch, CommandType, Attr = Key.split('_')\r
PriorityValue = 0x11111\r
- if Target == "*":\r
+ if Target == TAB_STAR:\r
PriorityValue &= 0x01111\r
- if ToolChain == "*":\r
+ if ToolChain == TAB_STAR:\r
PriorityValue &= 0x10111\r
- if Arch == "*":\r
+ if Arch == TAB_STAR:\r
PriorityValue &= 0x11011\r
- if CommandType == "*":\r
+ if CommandType == TAB_STAR:\r
PriorityValue &= 0x11101\r
- if Attr == "*":\r
+ if Attr == TAB_STAR:\r
PriorityValue &= 0x11110\r
\r
return self.PrioList["0x%0.5x" % PriorityValue]\r
if (Key[0] == self.BuildRuleFamily and\r
(ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):\r
Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')\r
- if (Target == self.BuildTarget or Target == "*") and\\r
- (ToolChain == self.ToolChain or ToolChain == "*") and\\r
- (Arch == self.Arch or Arch == "*") and\\r
+ if (Target == self.BuildTarget or Target == TAB_STAR) and\\r
+ (ToolChain == self.ToolChain or ToolChain == TAB_STAR) and\\r
+ (Arch == self.Arch or Arch == TAB_STAR) and\\r
Options[Key].startswith("="):\r
\r
if OverrideList.get(Key[1]) is not None:\r
# Use the highest priority value.\r
#\r
if (len(OverrideList) >= 2):\r
- KeyList = OverrideList.keys()\r
+ KeyList = list(OverrideList.keys())\r
for Index in range(len(KeyList)):\r
NowKey = KeyList[Index]\r
Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")\r
# Compare two Key, if one is included by another, choose the higher priority one\r
#\r
Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")\r
- if (Target1 == Target2 or Target1 == "*" or Target2 == "*") and\\r
- (ToolChain1 == ToolChain2 or ToolChain1 == "*" or ToolChain2 == "*") and\\r
- (Arch1 == Arch2 or Arch1 == "*" or Arch2 == "*") and\\r
- (CommandType1 == CommandType2 or CommandType1 == "*" or CommandType2 == "*") and\\r
- (Attr1 == Attr2 or Attr1 == "*" or Attr2 == "*"):\r
+ if (Target1 == Target2 or Target1 == TAB_STAR or Target2 == TAB_STAR) and\\r
+ (ToolChain1 == ToolChain2 or ToolChain1 == TAB_STAR or ToolChain2 == TAB_STAR) and\\r
+ (Arch1 == Arch2 or Arch1 == TAB_STAR or Arch2 == TAB_STAR) and\\r
+ (CommandType1 == CommandType2 or CommandType1 == TAB_STAR or CommandType2 == TAB_STAR) and\\r
+ (Attr1 == Attr2 or Attr1 == TAB_STAR or Attr2 == TAB_STAR):\r
\r
if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):\r
if Options.get((self.BuildRuleFamily, NextKey)) is not None:\r
continue\r
FamilyMatch = True\r
# expand any wildcard\r
- if Target == "*" or Target == self.BuildTarget:\r
- if Tag == "*" or Tag == self.ToolChain:\r
- if Arch == "*" or Arch == self.Arch:\r
+ if Target == TAB_STAR or Target == self.BuildTarget:\r
+ if Tag == TAB_STAR or Tag == self.ToolChain:\r
+ if Arch == TAB_STAR or Arch == self.Arch:\r
if Tool not in BuildOptions:\r
BuildOptions[Tool] = {}\r
if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):\r
continue\r
\r
# expand any wildcard\r
- if Target == "*" or Target == self.BuildTarget:\r
- if Tag == "*" or Tag == self.ToolChain:\r
- if Arch == "*" or Arch == self.Arch:\r
+ if Target == TAB_STAR or Target == self.BuildTarget:\r
+ if Tag == TAB_STAR or Tag == self.ToolChain:\r
+ if Arch == TAB_STAR or Arch == self.Arch:\r
if Tool not in BuildOptions:\r
BuildOptions[Tool] = {}\r
if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):\r
\r
## Append build options in platform to a module\r
#\r
- # @param Module The module to which the build options will be appened\r
+ # @param Module The module to which the build options will be appended\r
#\r
# @retval options The options appended with build options in platform\r
#\r
def ApplyBuildOption(self, Module):\r
# Get the different options for the different style module\r
- if Module.AutoGenVersion < 0x00010005:\r
- PlatformOptions = self.EdkBuildOption\r
- ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDK_NAME, Module.ModuleType)\r
- else:\r
- PlatformOptions = self.EdkIIBuildOption\r
- ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)\r
+ PlatformOptions = self.EdkIIBuildOption\r
+ ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)\r
ModuleTypeOptions = self._ExpandBuildOption(ModuleTypeOptions)\r
ModuleOptions = self._ExpandBuildOption(Module.BuildOptions)\r
if Module in self.Platform.Modules:\r
if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:\r
BuildRuleOrder = Options[Tool][Attr]\r
\r
- AllTools = set(ModuleOptions.keys() + PlatformOptions.keys() +\r
- PlatformModuleOptions.keys() + ModuleTypeOptions.keys() +\r
- self.ToolDefinition.keys())\r
+ AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +\r
+ list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +\r
+ list(self.ToolDefinition.keys()))\r
BuildOptions = defaultdict(lambda: defaultdict(str))\r
for Tool in AllTools:\r
for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:\r
else:\r
BuildOptions[Tool][Attr] = mws.handleWsMacro(Value)\r
\r
- if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag is not None:\r
- #\r
- # Override UNI flag only for EDK module.\r
- #\r
- BuildOptions['BUILD']['FLAGS'] = self.Workspace.UniFlag\r
return BuildOptions, BuildRuleOrder\r
\r
#\r
# call super().__init__ then call the worker function with different parameter count\r
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
if not hasattr(self, "_Init"):\r
- super(ModuleAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)\r
self._Init = True\r
\r
self.SourceDir = self.MetaFile.SubDir\r
self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)\r
\r
- self.SourceOverrideDir = None\r
- # use overrided path defined in DSC file\r
- if self.MetaFile.Key in GlobalData.gOverrideDir:\r
- self.SourceOverrideDir = GlobalData.gOverrideDir[self.MetaFile.Key]\r
-\r
self.ToolChain = Toolchain\r
self.BuildTarget = Target\r
self.Arch = Arch\r
def Guid(self):\r
#\r
# To build same module more than once, the module path with FILE_GUID overridden has\r
- # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the realy path\r
+ # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path\r
# in DSC. The overridden GUID can be retrieved from file name\r
#\r
if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):\r
self.MetaFile.BaseName\r
))\r
\r
- ## Return the directory to store the intermediate object files of the mdoule\r
+ ## Return the directory to store the intermediate object files of the module\r
@cached_property\r
def OutputDir(self):\r
return _MakeDir((self.BuildDir, "OUTPUT"))\r
return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
return ''\r
\r
- ## Return the directory to store auto-gened source files of the mdoule\r
+ ## Return the directory to store auto-gened source files of the module\r
@cached_property\r
def DebugDir(self):\r
return _MakeDir((self.BuildDir, "DEBUG"))\r
RetVal = {}\r
for Type in self.Module.CustomMakefile:\r
MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'\r
- if self.SourceOverrideDir is not None:\r
- File = os.path.join(self.SourceOverrideDir, self.Module.CustomMakefile[Type])\r
- if not os.path.exists(File):\r
- File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r
- else:\r
- File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r
+ File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r
RetVal[MakeType] = File\r
return RetVal\r
\r
\r
## Get the depex string\r
#\r
- # @return : a string contain all depex expresion.\r
+ # @return : a string contain all depex expression.\r
def _GetDepexExpresionString(self):\r
DepexStr = ''\r
DepexList = []\r
for M in [self.Module] + self.DependentLibraryList:\r
Filename = M.MetaFile.Path\r
InfObj = InfSectionParser.InfSectionParser(Filename)\r
- DepexExpresionList = InfObj.GetDepexExpresionList()\r
- for DepexExpresion in DepexExpresionList:\r
- for key in DepexExpresion:\r
+ DepexExpressionList = InfObj.GetDepexExpresionList()\r
+ for DepexExpression in DepexExpressionList:\r
+ for key in DepexExpression:\r
Arch, ModuleType = key\r
- DepexExpr = [x for x in DepexExpresion[key] if not str(x).startswith('#')]\r
+ DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]\r
# the type of build module is USER_DEFINED.\r
# All different DEPEX section tags would be copied into the As Built INF file\r
# and there would be separate DEPEX section tags\r
\r
DepexList = []\r
#\r
- # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion\r
+ # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression\r
#\r
for M in [self.Module] + self.DependentLibraryList:\r
Inherited = False\r
if '.' not in item:\r
NewList.append(item)\r
else:\r
- if item not in self._FixedPcdVoidTypeDict:\r
+ FixedVoidTypePcds = {}\r
+ if item in self.FixedVoidTypePcds:\r
+ FixedVoidTypePcds = self.FixedVoidTypePcds\r
+ elif M in self.PlatformInfo.LibraryAutoGenList:\r
+ Index = self.PlatformInfo.LibraryAutoGenList.index(M)\r
+ FixedVoidTypePcds = self.PlatformInfo.LibraryAutoGenList[Index].FixedVoidTypePcds\r
+ if item not in FixedVoidTypePcds:\r
EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))\r
else:\r
- Value = self._FixedPcdVoidTypeDict[item]\r
+ Value = FixedVoidTypePcds[item]\r
if len(Value.split(',')) != 16:\r
EdkLogger.error("build", FORMAT_INVALID,\r
"{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))\r
# EDK II modules must not reference header files outside of the packages they depend on or\r
# within the module's directory tree. Report an error if this rule is violated.\r
#\r
- if self.AutoGenVersion >= 0x00010005:\r
+ if GlobalData.gDisableIncludePathCheck == False:\r
for Path in IncPathList:\r
if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):\r
ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)\r
@cached_property\r
def SourceFileList(self):\r
RetVal = []\r
- ToolChainTagSet = {"", "*", self.ToolChain}\r
- ToolChainFamilySet = {"", "*", self.ToolChainFamily, self.BuildRuleFamily}\r
+ ToolChainTagSet = {"", TAB_STAR, self.ToolChain}\r
+ ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}\r
for F in self.Module.Sources:\r
# match tool chain\r
if F.TagName not in ToolChainTagSet:\r
continue\r
\r
# add the file's directory into the search path list for file inclusion\r
- if F.Dir not in self.IncludePathList and self.AutoGenVersion >= 0x00010005:\r
+ if F.Dir not in self.IncludePathList:\r
self.IncludePathList.insert(0, F.Dir)\r
RetVal.append(F)\r
\r
self.BuildOption\r
for SingleFile in FileList:\r
if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:\r
- key = SingleFile.Path.split(SingleFile.Ext)[0]\r
+ key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]\r
if key in Order_Dict:\r
Order_Dict[key].append(SingleFile.Ext)\r
else:\r
def BinaryFileList(self):\r
RetVal = []\r
for F in self.Module.Binaries:\r
- if F.Target not in [TAB_ARCH_COMMON, '*'] and F.Target != self.BuildTarget:\r
+ if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:\r
continue\r
RetVal.append(F)\r
self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)\r
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)\r
RetVal[AutoFile] = str(StringH)\r
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
- if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":\r
+ if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":\r
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)\r
RetVal[AutoFile] = UniStringBinBuffer.getvalue()\r
AutoFile.IsBinary = True\r
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)\r
RetVal[AutoFile] = str(StringIdf)\r
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
- if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":\r
+ if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":\r
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)\r
RetVal[AutoFile] = IdfGenBinBuffer.getvalue()\r
AutoFile.IsBinary = True\r
IdfGenBinBuffer.close()\r
return RetVal\r
\r
- ## Return the list of library modules explicitly or implicityly used by this module\r
+ ## Return the list of library modules explicitly or implicitly used by this module\r
@cached_property
def DependentLibraryList(self):
# only merge library classes and PCD for non-library module
if self.IsLibrary:
return []
- if self.AutoGenVersion < 0x00010005:
- return self.PlatformInfo.ResolveLibraryReference(self.Module)
# Resolve this module's library class references to the concrete library
# instances selected by the platform (DSC).
return self.PlatformInfo.ApplyLibraryInstance(self.Module)
\r
## Get the list of PCDs from current module\r
@cached_property\r
def IncludePathList(self):\r
RetVal = []\r
- if self.AutoGenVersion < 0x00010005:\r
- for Inc in self.Module.Includes:\r
- if Inc not in RetVal:\r
- RetVal.append(Inc)\r
- # for Edk modules\r
- Inc = path.join(Inc, self.Arch.capitalize())\r
- if os.path.exists(Inc) and Inc not in RetVal:\r
- RetVal.append(Inc)\r
- # Edk module needs to put DEBUG_DIR at the end of search path and not to use SOURCE_DIR all the time\r
- RetVal.append(self.DebugDir)\r
- else:\r
- RetVal.append(self.MetaFile.Dir)\r
- RetVal.append(self.DebugDir)\r
+ RetVal.append(self.MetaFile.Dir)\r
+ RetVal.append(self.DebugDir)\r
\r
for Package in self.Module.Packages:\r
PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
Guid = gEfiVarStoreGuidPattern.search(Content, Pos)\r
if not Guid:\r
break\r
- NameArray = ConvertStringToByteArray('L"' + Name.group(1) + '"')\r
+ NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')\r
NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))\r
Pos = Content.find('efivarstore', Name.end())\r
if not NameGuids:\r
Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)\r
if not Value:\r
continue\r
- Name = ConvertStringToByteArray(SkuInfo.VariableName)\r
+ Name = _ConvertStringToByteArray(SkuInfo.VariableName)\r
Guid = GuidStructureStringToGuidString(Value)\r
if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:\r
HiiExPcds.append(Pcd)\r
return None\r
MapFileName = os.path.join(self.OutputDir, self.Name + ".map")\r
EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")\r
- VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrUniBaseName.values())\r
+ VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))\r
if not VfrUniOffsetList:\r
return None\r
\r
EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)\r
\r
# Use an instance of BytesIO to cache data\r
- fStringIO = BytesIO('')\r
+ fStringIO = BytesIO()\r
\r
for Item in VfrUniOffsetList:\r
if (Item[0].find("Strings") != -1):\r
# GUID + Offset\r
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r
#\r
- UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]\r
- UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]\r
- fStringIO.write(''.join(UniGuid))\r
+ UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'\r
+ fStringIO.write(UniGuid)\r
UniValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (UniValue)\r
else:\r
# GUID + Offset\r
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r
#\r
- VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]\r
- VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]\r
- fStringIO.write(''.join(VfrGuid))\r
+ VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'\r
+ fStringIO.write(VfrGuid)\r
VfrValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (VfrValue)\r
#\r
if self.IsAsBuiltInfCreated:\r
return\r
\r
- # Skip the following code for EDK I inf\r
- if self.AutoGenVersion < 0x00010005:\r
- return\r
-\r
# Skip the following code for libraries\r
if self.IsLibrary:\r
return\r
AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')\r
else:\r
AsBuiltInfDict['binary_item'].append('BIN|' + File)\r
+ if not self.DepexGenerated:\r
+ DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')\r
+ if os.path.exists(DepexFile):\r
+ self.DepexGenerated = True\r
if self.DepexGenerated:\r
self.OutputFile.add(self.Name + '.depex')\r
if self.ModuleType in [SUP_MODULE_PEIM]:\r
Padding = '0x00, '\r
if Unicode:\r
Padding = Padding * 2\r
- ArraySize = ArraySize / 2\r
+ ArraySize = ArraySize // 2\r
if ArraySize < (len(PcdValue) + 1):\r
if Pcd.MaxSizeUserSet:\r
EdkLogger.error("build", AUTOGEN_ERROR,\r
AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr\r
\r
# Generate the depex expression section, emitted as comments.\r
- DepexExpresion = self._GetDepexExpresionString()\r
- AsBuiltInfDict['depexsection_item'] = DepexExpresion if DepexExpresion else ''\r
+ DepexExpression = self._GetDepexExpresionString()\r
+ AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''\r
\r
AsBuiltInf = TemplateString()\r
AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))\r
\r
for File in self.AutoGenFileList:\r
if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):\r
- #Ignore Edk AutoGen.c\r
- if self.AutoGenVersion < 0x00010005 and File.Name == 'AutoGen.c':\r
- continue\r
-\r
AutoGenList.append(str(File))\r
else:\r
IgoredAutoGenList.append(str(File))\r
\r
- # Skip the following code for EDK I inf\r
- if self.AutoGenVersion < 0x00010005:\r
- return\r
\r
for ModuleType in self.DepexList:\r
# Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module\r
GlobalData.gModuleHash[self.Arch] = {}\r
m = hashlib.md5()\r
# Add Platform level hash\r
- m.update(GlobalData.gPlatformHash)\r
+ m.update(GlobalData.gPlatformHash.encode('utf-8'))\r
# Add Package level hash\r
if self.DependentPackageList:\r
for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:\r
- m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName])\r
+ m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName].encode('utf-8'))\r
\r
# Add Library hash\r
if self.LibraryAutoGenList:\r
for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r
Lib.GenModuleHash()\r
- m.update(GlobalData.gModuleHash[self.Arch][Lib.Name])\r
+ m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
\r
# Add Module self\r
- f = open(str(self.MetaFile), 'r')\r
+ f = open(str(self.MetaFile), 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
# Add Module's source files\r
if self.SourceFileList:\r
for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r
- f = open(str(File), 'r')\r
+ f = open(str(File), 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
if GlobalData.gBinCacheSource:\r
if self.AttemptModuleCacheCopy():\r
return False\r
- return SaveFileOnChange(ModuleHashFile, m.hexdigest(), True)\r
+ return SaveFileOnChange(ModuleHashFile, m.hexdigest(), False)\r
\r
## Decide whether we can skip the ModuleAutoGen process\r
def CanSkipbyHash(self):\r