#\r
from __future__ import print_function\r
from __future__ import absolute_import\r
-import Common.LongFilePathOs as os\r
-import re\r
-import os.path as path\r
-import copy\r
-import uuid\r
-\r
-from . import GenC\r
-from . import GenMake\r
-from . import GenDepex\r
-from io import BytesIO\r
-\r
-from .StrGather import *\r
-from .BuildEngine import BuildRuleObj as BuildRule\r
-from .BuildEngine import gDefaultBuildRuleFile,AutoGenReqBuildRuleVerNum\r
-import shutil\r
-from Common.LongFilePathSupport import CopyLongFilePath\r
-from Common.BuildToolError import *\r
-from Common.DataType import *\r
-from Common.Misc import *\r
-from Common.StringUtils import *\r
-import Common.GlobalData as GlobalData\r
-from GenFds.FdfParser import *\r
-from CommonDataClass.CommonClass import SkuInfoClass\r
-from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile\r
-import Common.VpdInfoFile as VpdInfoFile\r
-from .GenPcdDb import CreatePcdDatabaseCode\r
-from Workspace.MetaFileCommentParser import UsageList\r
-from Workspace.WorkspaceCommon import GetModuleLibInstances\r
-from Common.MultipleWorkspace import MultipleWorkspace as mws\r
-from . import InfSectionParser\r
-import datetime\r
-import hashlib\r
-from .GenVar import VariableMgr, var_info\r
-from collections import OrderedDict\r
-from collections import defaultdict\r
-from Workspace.WorkspaceCommon import OrderedListDict\r
-from Common.ToolDefClassObject import gDefaultToolsDefFile\r
-\r
-from Common.caching import cached_property, cached_class_function\r
-\r
## Regular expression for splitting a Dependency Expression string into tokens
#  (parentheses, words, or " <path>.inf" file references).
#  NOTE: patterns are raw strings; the original non-raw "\(", "\w", "\s"
#  escapes trigger invalid-escape DeprecationWarnings on Python 3.6+.
gDepexTokenPattern = re.compile(r"(\(|\)|\w+| \S+\.inf)")

## Regular expression for match: PCD(xxxx.yyy)
gPCDAsGuidPattern = re.compile(r"^PCD\(.+\..+\)$")

#
# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
# is the former use /I , the Latter used -I to specify include directories
#
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

#
# Match name = variable
#
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
#
# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
#
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
-\r
## Mapping from toolchain family to the Makefile flavor it consumes
#  (MSFT tools use nmake syntax, GCC-family tools use gmake syntax)
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}


## default file name for AutoGen
#  %(module_name)s placeholders are filled in per module via %-formatting.
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
# INF specification version written into generated As-Built INF [Defines]
gInfSpecVersion = "0x00010017"
-\r
#
# Template string used to generate the As-Built INF for a module.
# ${name} placeholders are substituted by TemplateString; ${BEGIN}/${END}
# pairs delimit sections that are repeated once per list item.
#
gAsBuiltInfHeaderString = TemplateString("""${header_comments}

# DO NOT EDIT
# FILE auto-generated

[Defines]
 INF_VERSION = ${module_inf_version}
 BASE_NAME = ${module_name}
 FILE_GUID = ${module_guid}
 MODULE_TYPE = ${module_module_type}${BEGIN}
 VERSION_STRING = ${module_version_string}${END}${BEGIN}
 PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
 UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
 PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
 ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
 UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
 CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
 DESTRUCTOR = ${module_destructor}${END}${BEGIN}
 SHADOW = ${module_shadow}${END}${BEGIN}
 PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
 PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
 PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
 PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
 BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
 SPEC = ${module_spec}${END}${BEGIN}
 UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
 MODULE_UNI_FILE = ${module_uni_file}${END}

[Packages.${module_arch}]${BEGIN}
 ${package_item}${END}

[Binaries.${module_arch}]${BEGIN}
 ${binary_item}${END}

[PatchPcd.${module_arch}]${BEGIN}
 ${patchablepcd_item}
${END}

[Protocols.${module_arch}]${BEGIN}
 ${protocol_item}
${END}

[Ppis.${module_arch}]${BEGIN}
 ${ppi_item}
${END}

[Guids.${module_arch}]${BEGIN}
 ${guid_item}
${END}

[PcdEx.${module_arch}]${BEGIN}
 ${pcd_item}
${END}

[LibraryClasses.${module_arch}]
## @LIB_INSTANCES${BEGIN}
# ${libraryclasses_item}${END}

${depexsection_item}

${userextension_tianocore_item}

${tail_comments}

[BuildOptions.${module_arch}]
## @AsBuilt${BEGIN}
## ${flags_item}${END}
""")
## Split a command line option string into a list of options
#
# subprocess.Popen needs the args to be a sequence. Otherwise there's problem
# in non-windows platform to launch command
#
def _SplitOption(OptionString):
    SplitArgs = []
    PrevChar = " "
    StartIndex = 0
    ActiveQuote = ""
    for CharIndex, Char in enumerate(OptionString):
        if Char in ('"', "'"):
            # Track entering/leaving a quoted region. Quote characters never
            # update PrevChar, matching the original scanner's behavior.
            if ActiveQuote == Char:
                ActiveQuote = ""
            elif not ActiveQuote:
                ActiveQuote = Char
            continue
        if ActiveQuote:
            # Inside a quoted region nothing acts as a separator.
            continue

        if Char in ("/", "-") and PrevChar in (" ", "\t", "\r", "\n"):
            # A new option starts here: emit the previous one, dropping the
            # single whitespace character that separated them.
            if CharIndex > StartIndex:
                SplitArgs.append(OptionString[StartIndex:CharIndex - 1])
            StartIndex = CharIndex
        PrevChar = Char
    SplitArgs.append(OptionString[StartIndex:])
    return SplitArgs
-\r
#
# Convert a string to a C-format byte-array initializer.
# Accepts a C array literal ('{1, 0x2}'), an ASCII string ('"AB"'), or a
# UCS-2 string ('L"AB"'). Returns e.g. '{65,66,0}' (NUL-terminated), or
# None when the input is not one of the recognized, well-formed forms.
#
def _ConvertStringToByteArray(Value):
    import ast

    Value = Value.strip()
    if not Value:
        return None
    if Value[0] == '{':
        # Already a C array literal: normalize every element to decimal.
        if not Value.endswith('}'):
            return None
        Value = Value.replace(' ', '').replace('{', '').replace('}', '')
        ValFields = Value.split(',')
        try:
            for Index in range(len(ValFields)):
                ValFields[Index] = str(int(ValFields[Index], 0))
        except ValueError:
            return None
        return '{' + ','.join(ValFields) + '}'

    Unicode = False
    if Value.startswith('L"'):
        if not Value.endswith('"'):
            return None
        Value = Value[1:]
        Unicode = True
    elif not Value.startswith('"') or not Value.endswith('"'):
        return None

    # Translate escape sequences. ast.literal_eval only evaluates literals;
    # the original eval() would execute arbitrary expressions embedded in the
    # metadata value. Malformed literals now yield None instead of raising.
    try:
        Value = ast.literal_eval(Value)
    except (ValueError, SyntaxError):
        return None
    NewValue = '{'
    for Char in Value:
        # Each character becomes its code point truncated to the element
        # size: 16-bit for L"" strings, 8-bit otherwise.
        if Unicode:
            NewValue = NewValue + str(ord(Char) % 0x10000) + ','
        else:
            NewValue = NewValue + str(ord(Char) % 0x100) + ','
    return NewValue + '0}'
-\r
+from Common.DataType import TAB_STAR\r
## Base class for AutoGen\r
#\r
# This class just implements the cache mechanism of AutoGen objects.\r
# @param *args The specific class related parameters\r
# @param **kwargs The specific class related dict parameters\r
#\r
+\r
def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
# check if the object has been created\r
Key = (Target, Toolchain, Arch, MetaFile)\r
def __eq__(self, Other):\r
return Other and self.MetaFile == Other\r
\r
-## Workspace AutoGen class\r
-#\r
-# This class is used mainly to control the whole platform build for different\r
-# architecture. This class will generate top level makefile.\r
-#\r
-class WorkspaceAutoGen(AutoGen):\r
- # call super().__init__ then call the worker function with different parameter count\r
- def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
- if not hasattr(self, "_Init"):\r
- self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
- self._Init = True\r
-\r
    ## Initialize WorkspaceAutoGen
    #
    #   @param  WorkspaceDir            Root directory of workspace
    #   @param  ActivePlatform          Meta-file of active platform
    #   @param  Target                  Build target
    #   @param  Toolchain               Tool chain name
    #   @param  ArchList                List of architecture of current build
    #   @param  MetaFileDb              Database containing meta-files
    #   @param  BuildConfig             Configuration of build
    #   @param  ToolDefinition          Tool chain definitions
    #   @param  FlashDefinitionFile     File of flash definition
    #   @param  Fds                     FD list to be generated
    #   @param  Fvs                     FV list to be generated
    #   @param  Caps                    Capsule list to be generated
    #   @param  SkuId                   SKU id from command line
    #   @param  UniFlag                 UNI build flag from the command line
    #   @param  Progress                Optional progress reporter; Start() is called on it
    #   @param  BuildModule             Optional active module (used for log output only)
    #
    def _InitWorker(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,
              BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None, Fvs=None, Caps=None, SkuId='', UniFlag=None,
              Progress=None, BuildModule=None):
        self.BuildDatabase = MetaFileDb
        self.MetaFile = ActivePlatform
        self.WorkspaceDir = WorkspaceDir
        # DSC object of the active platform, parsed for the arch-common section
        self.Platform = self.BuildDatabase[self.MetaFile, TAB_ARCH_COMMON, Target, Toolchain]
        GlobalData.gActivePlatform = self.Platform
        self.BuildTarget = Target
        self.ToolChain = Toolchain
        self.ArchList = ArchList
        self.SkuId = SkuId
        self.UniFlag = UniFlag

        self.TargetTxt = BuildConfig
        self.ToolDef = ToolDefinition
        self.FdfFile = FlashDefinitionFile
        self.FdTargetList = Fds if Fds else []
        self.FvTargetList = Fvs if Fvs else []
        self.CapTargetList = Caps if Caps else []
        self.AutoGenObjectList = []
        self._GuidDict = {}

        # there's many relative directory operations, so ...
        os.chdir(self.WorkspaceDir)

        # Intersect requested arches with the platform's, then validate target
        self.MergeArch()
        self.ValidateBuildTarget()

        EdkLogger.info("")
        if self.ArchList:
            EdkLogger.info('%-16s = %s' % ("Architecture(s)", ' '.join(self.ArchList)))
        EdkLogger.info('%-16s = %s' % ("Build target", self.BuildTarget))
        EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))

        EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.Platform))
        if BuildModule:
            EdkLogger.info('%-24s = %s' % ("Active Module", BuildModule))

        if self.FdfFile:
            EdkLogger.info('%-24s = %s' % ("Flash Image Definition", self.FdfFile))

        EdkLogger.verbose("\nFLASH_DEFINITION = %s" % self.FdfFile)

        if Progress:
            Progress.Start("\nProcessing meta-data")
        #
        # Mark now build in AutoGen Phase
        #
        GlobalData.gAutoGenPhase = True
        # NOTE: the following calls are order-sensitive — PCD type resolution
        # and mixed-PCD renaming must happen before FDF PCDs are collected.
        self.ProcessModuleFromPdf()
        self.ProcessPcdType()
        self.ProcessMixedPcd()
        self.GetPcdsFromFDF()
        self.CollectAllPcds()
        self.GeneratePkgLevelHash()
        #
        # Check PCDs token value conflict in each DEC file.
        #
        self._CheckAllPcdsTokenValueConflict()
        #
        # Check PCD type and definition between DSC and DEC
        #
        self._CheckPcdDefineAndType()

        self.CreateBuildOptionsFile()
        self.CreatePcdTokenNumberFile()
        self.CreateModuleHashInfo()
        GlobalData.gAutoGenPhase = False
-\r
- #\r
- # Merge Arch\r
- #\r
- def MergeArch(self):\r
- if not self.ArchList:\r
- ArchList = set(self.Platform.SupArchList)\r
- else:\r
- ArchList = set(self.ArchList) & set(self.Platform.SupArchList)\r
- if not ArchList:\r
- EdkLogger.error("build", PARAMETER_INVALID,\r
- ExtraData = "Invalid ARCH specified. [Valid ARCH: %s]" % (" ".join(self.Platform.SupArchList)))\r
- elif self.ArchList and len(ArchList) != len(self.ArchList):\r
- SkippedArchList = set(self.ArchList).symmetric_difference(set(self.Platform.SupArchList))\r
- EdkLogger.verbose("\nArch [%s] is ignored because the platform supports [%s] only!"\r
- % (" ".join(SkippedArchList), " ".join(self.Platform.SupArchList)))\r
- self.ArchList = tuple(ArchList)\r
-\r
- # Validate build target\r
- def ValidateBuildTarget(self):\r
- if self.BuildTarget not in self.Platform.BuildTargets:\r
- EdkLogger.error("build", PARAMETER_INVALID,\r
- ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"\r
- % (self.BuildTarget, " ".join(self.Platform.BuildTargets)))\r
- @cached_property\r
- def FdfProfile(self):\r
- if not self.FdfFile:\r
- self.FdfFile = self.Platform.FlashDefinition\r
-\r
- FdfProfile = None\r
- if self.FdfFile:\r
- Fdf = FdfParser(self.FdfFile.Path)\r
- Fdf.ParseFile()\r
- GlobalData.gFdfParser = Fdf\r
- if Fdf.CurrentFdName and Fdf.CurrentFdName in Fdf.Profile.FdDict:\r
- FdDict = Fdf.Profile.FdDict[Fdf.CurrentFdName]\r
- for FdRegion in FdDict.RegionList:\r
- if str(FdRegion.RegionType) is 'FILE' and self.Platform.VpdToolGuid in str(FdRegion.RegionDataList):\r
- if int(FdRegion.Offset) % 8 != 0:\r
- EdkLogger.error("build", FORMAT_INVALID, 'The VPD Base Address %s must be 8-byte aligned.' % (FdRegion.Offset))\r
- FdfProfile = Fdf.Profile\r
- else:\r
- if self.FdTargetList:\r
- EdkLogger.info("No flash definition file found. FD [%s] will be ignored." % " ".join(self.FdTargetList))\r
- self.FdTargetList = []\r
- if self.FvTargetList:\r
- EdkLogger.info("No flash definition file found. FV [%s] will be ignored." % " ".join(self.FvTargetList))\r
- self.FvTargetList = []\r
- if self.CapTargetList:\r
- EdkLogger.info("No flash definition file found. Capsule [%s] will be ignored." % " ".join(self.CapTargetList))\r
- self.CapTargetList = []\r
-\r
- return FdfProfile\r
-\r
    ## Verify that every module referenced from the FDF is known to the DSC.
    #
    # Errors out when an FV named on the command line does not exist in the
    # FDF, or when a non-binary INF listed in the FDF is missing from the DSC.
    def ProcessModuleFromPdf(self):

        if self.FdfProfile:
            for fvname in self.FvTargetList:
                if fvname.upper() not in self.FdfProfile.FvDict:
                    EdkLogger.error("build", OPTION_VALUE_INVALID,
                                    "No such an FV in FDF file: %s" % fvname)

            # In DSC file may use FILE_GUID to override the module, then in the Platform.Modules use FILE_GUIDmodule.inf as key,
            # but the path (self.MetaFile.Path) is the real path
            for key in self.FdfProfile.InfDict:
                if key == 'ArchTBD':
                    # Arch-neutral INFs: accept the module if any arch's DSC
                    # lists it.
                    MetaFile_cache = defaultdict(set)
                    for Arch in self.ArchList:
                        Current_Platform_cache = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
                        for Pkey in Current_Platform_cache.Modules:
                            MetaFile_cache[Arch].add(Current_Platform_cache.Modules[Pkey].MetaFile)
                    for Inf in self.FdfProfile.InfDict[key]:
                        # NOTE(review): 'Arch' here is the leftover value from
                        # the loop above (i.e. the last arch), not a per-INF
                        # arch — presumably acceptable for ArchTBD entries,
                        # but worth confirming.
                        ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
                        for Arch in self.ArchList:
                            if ModuleFile in MetaFile_cache[Arch]:
                                break
                        else:
                            # for/else: no arch's DSC listed this module
                            ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
                            if not ModuleData.IsBinaryModule:
                                EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)

                else:
                    # Arch-specific INF lists: check only the matching arch.
                    for Arch in self.ArchList:
                        if Arch == key:
                            Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
                            MetaFileList = set()
                            for Pkey in Platform.Modules:
                                MetaFileList.add(Platform.Modules[Pkey].MetaFile)
                            for Inf in self.FdfProfile.InfDict[key]:
                                ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
                                if ModuleFile in MetaFileList:
                                    continue
                                ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
                                if not ModuleData.IsBinaryModule:
                                    EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)
-\r
-\r
-\r
- # parse FDF file to get PCDs in it, if any\r
- def GetPcdsFromFDF(self):\r
-\r
- if self.FdfProfile:\r
- PcdSet = self.FdfProfile.PcdDict\r
- # handle the mixed pcd in FDF file\r
- for key in PcdSet:\r
- if key in GlobalData.MixedPcd:\r
- Value = PcdSet[key]\r
- del PcdSet[key]\r
- for item in GlobalData.MixedPcd[key]:\r
- PcdSet[item] = Value\r
- self.VerifyPcdDeclearation(PcdSet)\r
-\r
    ## Resolve the access type of "Pending" PCDs in every module.
    #
    # A module-scope PCD whose type is still Pending inherits the type from:
    # the platform DSC, then the DSC's per-module override section, and — for
    # library instances — from a referencing module's override section.
    def ProcessPcdType(self):
        for Arch in self.ArchList:
            Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
            # Touch Platform.Pcds so the DSC PCDs are parsed and cached before
            # the per-module resolution below reads them.
            Platform.Pcds
            # generate the SourcePcdDict and BinaryPcdDict
            PGen = PlatformAutoGen(self, self.MetaFile, self.BuildTarget, self.ToolChain, Arch)
            for BuildData in list(PGen.BuildDatabase._CACHE_.values()):
                if BuildData.Arch != Arch:
                    continue
                if BuildData.MetaFile.Ext == '.inf':
                    for key in BuildData.Pcds:
                        if BuildData.Pcds[key].Pending:
                            # 1) type declared at platform (DSC) scope
                            if key in Platform.Pcds:
                                PcdInPlatform = Platform.Pcds[key]
                                if PcdInPlatform.Type:
                                    BuildData.Pcds[key].Type = PcdInPlatform.Type
                                    BuildData.Pcds[key].Pending = False

                            # 2) type from the DSC's <module> override section
                            if BuildData.MetaFile in Platform.Modules:
                                PlatformModule = Platform.Modules[str(BuildData.MetaFile)]
                                if key in PlatformModule.Pcds:
                                    PcdInPlatform = PlatformModule.Pcds[key]
                                    if PcdInPlatform.Type:
                                        BuildData.Pcds[key].Type = PcdInPlatform.Type
                                        BuildData.Pcds[key].Pending = False
                            else:
                                #Pcd used in Library, Pcd Type from reference module if Pcd Type is Pending
                                if BuildData.Pcds[key].Pending:
                                    MGen = ModuleAutoGen(self, BuildData.MetaFile, self.BuildTarget, self.ToolChain, Arch, self.MetaFile)
                                    if MGen and MGen.IsLibrary:
                                        if MGen in PGen.LibraryAutoGenList:
                                            ReferenceModules = MGen.ReferenceModules
                                            for ReferenceModule in ReferenceModules:
                                                if ReferenceModule.MetaFile in Platform.Modules:
                                                    RefPlatformModule = Platform.Modules[str(ReferenceModule.MetaFile)]
                                                    if key in RefPlatformModule.Pcds:
                                                        PcdInReferenceModule = RefPlatformModule.Pcds[key]
                                                        if PcdInReferenceModule.Type:
                                                            BuildData.Pcds[key].Type = PcdInReferenceModule.Type
                                                            BuildData.Pcds[key].Pending = False
                                                            break
-\r
- def ProcessMixedPcd(self):\r
- for Arch in self.ArchList:\r
- SourcePcdDict = {TAB_PCDS_DYNAMIC_EX:set(), TAB_PCDS_PATCHABLE_IN_MODULE:set(),TAB_PCDS_DYNAMIC:set(),TAB_PCDS_FIXED_AT_BUILD:set()}\r
- BinaryPcdDict = {TAB_PCDS_DYNAMIC_EX:set(), TAB_PCDS_PATCHABLE_IN_MODULE:set()}\r
- SourcePcdDict_Keys = SourcePcdDict.keys()\r
- BinaryPcdDict_Keys = BinaryPcdDict.keys()\r
-\r
- # generate the SourcePcdDict and BinaryPcdDict\r
- PGen = PlatformAutoGen(self, self.MetaFile, self.BuildTarget, self.ToolChain, Arch)\r
- for BuildData in list(PGen.BuildDatabase._CACHE_.values()):\r
- if BuildData.Arch != Arch:\r
- continue\r
- if BuildData.MetaFile.Ext == '.inf':\r
- for key in BuildData.Pcds:\r
- if TAB_PCDS_DYNAMIC_EX in BuildData.Pcds[key].Type:\r
- if BuildData.IsBinaryModule:\r
- BinaryPcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r
- else:\r
- SourcePcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r
-\r
- elif TAB_PCDS_PATCHABLE_IN_MODULE in BuildData.Pcds[key].Type:\r
- if BuildData.MetaFile.Ext == '.inf':\r
- if BuildData.IsBinaryModule:\r
- BinaryPcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r
- else:\r
- SourcePcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r
-\r
- elif TAB_PCDS_DYNAMIC in BuildData.Pcds[key].Type:\r
- SourcePcdDict[TAB_PCDS_DYNAMIC].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r
- elif TAB_PCDS_FIXED_AT_BUILD in BuildData.Pcds[key].Type:\r
- SourcePcdDict[TAB_PCDS_FIXED_AT_BUILD].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r
-\r
- #\r
- # A PCD can only use one type for all source modules\r
- #\r
- for i in SourcePcdDict_Keys:\r
- for j in SourcePcdDict_Keys:\r
- if i != j:\r
- Intersections = SourcePcdDict[i].intersection(SourcePcdDict[j])\r
- if len(Intersections) > 0:\r
- EdkLogger.error(\r
- 'build',\r
- FORMAT_INVALID,\r
- "Building modules from source INFs, following PCD use %s and %s access method. It must be corrected to use only one access method." % (i, j),\r
- ExtraData='\n\t'.join(str(P[1]+'.'+P[0]) for P in Intersections)\r
- )\r
-\r
- #\r
- # intersection the BinaryPCD for Mixed PCD\r
- #\r
- for i in BinaryPcdDict_Keys:\r
- for j in BinaryPcdDict_Keys:\r
- if i != j:\r
- Intersections = BinaryPcdDict[i].intersection(BinaryPcdDict[j])\r
- for item in Intersections:\r
- NewPcd1 = (item[0] + '_' + i, item[1])\r
- NewPcd2 = (item[0] + '_' + j, item[1])\r
- if item not in GlobalData.MixedPcd:\r
- GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]\r
- else:\r
- if NewPcd1 not in GlobalData.MixedPcd[item]:\r
- GlobalData.MixedPcd[item].append(NewPcd1)\r
- if NewPcd2 not in GlobalData.MixedPcd[item]:\r
- GlobalData.MixedPcd[item].append(NewPcd2)\r
-\r
- #\r
- # intersection the SourcePCD and BinaryPCD for Mixed PCD\r
- #\r
- for i in SourcePcdDict_Keys:\r
- for j in BinaryPcdDict_Keys:\r
- if i != j:\r
- Intersections = SourcePcdDict[i].intersection(BinaryPcdDict[j])\r
- for item in Intersections:\r
- NewPcd1 = (item[0] + '_' + i, item[1])\r
- NewPcd2 = (item[0] + '_' + j, item[1])\r
- if item not in GlobalData.MixedPcd:\r
- GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]\r
- else:\r
- if NewPcd1 not in GlobalData.MixedPcd[item]:\r
- GlobalData.MixedPcd[item].append(NewPcd1)\r
- if NewPcd2 not in GlobalData.MixedPcd[item]:\r
- GlobalData.MixedPcd[item].append(NewPcd2)\r
-\r
- for BuildData in list(PGen.BuildDatabase._CACHE_.values()):\r
- if BuildData.Arch != Arch:\r
- continue\r
- for key in BuildData.Pcds:\r
- for SinglePcd in GlobalData.MixedPcd:\r
- if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) == SinglePcd:\r
- for item in GlobalData.MixedPcd[SinglePcd]:\r
- Pcd_Type = item[0].split('_')[-1]\r
- if (Pcd_Type == BuildData.Pcds[key].Type) or (Pcd_Type == TAB_PCDS_DYNAMIC_EX and BuildData.Pcds[key].Type in PCD_DYNAMIC_EX_TYPE_SET) or \\r
- (Pcd_Type == TAB_PCDS_DYNAMIC and BuildData.Pcds[key].Type in PCD_DYNAMIC_TYPE_SET):\r
- Value = BuildData.Pcds[key]\r
- Value.TokenCName = BuildData.Pcds[key].TokenCName + '_' + Pcd_Type\r
- if len(key) == 2:\r
- newkey = (Value.TokenCName, key[1])\r
- elif len(key) == 3:\r
- newkey = (Value.TokenCName, key[1], key[2])\r
- del BuildData.Pcds[key]\r
- BuildData.Pcds[newkey] = Value\r
- break\r
- break\r
-\r
- #Collect package set information from INF of FDF\r
- @cached_property\r
- def PkgSet(self):\r
- if not self.FdfFile:\r
- self.FdfFile = self.Platform.FlashDefinition\r
-\r
- if self.FdfFile:\r
- ModuleList = self.FdfProfile.InfList\r
- else:\r
- ModuleList = []\r
- Pkgs = {}\r
- for Arch in self.ArchList:\r
- Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]\r
- PGen = PlatformAutoGen(self, self.MetaFile, self.BuildTarget, self.ToolChain, Arch)\r
- PkgSet = set()\r
- for Inf in ModuleList:\r
- ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)\r
- if ModuleFile in Platform.Modules:\r
- continue\r
- ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]\r
- PkgSet.update(ModuleData.Packages)\r
- Pkgs[Arch] = list(PkgSet) + list(PGen.PackageList)\r
- return Pkgs\r
-\r
    ## Check that every PCD set in the FDF is declared by some DEC file.
    #
    #   @param  PcdSet  dict keyed by (TokenCName, TokenSpaceGuid, Fields)
    #                   tuples collected from the FDF
    def VerifyPcdDeclearation(self,PcdSet):
        for Arch in self.ArchList:
            Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
            Pkgs = self.PkgSet[Arch]
            DecPcds = set()
            DecPcdsKey = set()
            for Pkg in Pkgs:
                for Pcd in Pkg.Pcds:
                    # DEC Pcds keys carry (name, guid, type)
                    DecPcds.add((Pcd[0], Pcd[1]))
                    DecPcdsKey.add((Pcd[0], Pcd[1], Pcd[2]))

            Platform.SkuName = self.SkuId
            for Name, Guid, Fileds in PcdSet:
                if (Name, Guid) not in DecPcds:
                    EdkLogger.error(
                        'build',
                        PARSER_ERROR,
                        "PCD (%s.%s) used in FDF is not declared in DEC files." % (Guid, Name),
                        File = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][0],
                        Line = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][1]
                    )
                else:
                    # Check whether Dynamic or DynamicEx PCD used in FDF file. If used, build break and give a error message.
                    if (Name, Guid, TAB_PCDS_FIXED_AT_BUILD) in DecPcdsKey \
                        or (Name, Guid, TAB_PCDS_PATCHABLE_IN_MODULE) in DecPcdsKey \
                        or (Name, Guid, TAB_PCDS_FEATURE_FLAG) in DecPcdsKey:
                        continue
                    elif (Name, Guid, TAB_PCDS_DYNAMIC) in DecPcdsKey or (Name, Guid, TAB_PCDS_DYNAMIC_EX) in DecPcdsKey:
                        EdkLogger.error(
                            'build',
                            PARSER_ERROR,
                            "Using Dynamic or DynamicEx type of PCD [%s.%s] in FDF file is not allowed." % (Guid, Name),
                            File = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][0],
                            Line = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][1]
                        )
- def CollectAllPcds(self):\r
-\r
- for Arch in self.ArchList:\r
- Pa = PlatformAutoGen(self, self.MetaFile, self.BuildTarget, self.ToolChain, Arch)\r
- #\r
- # Explicitly collect platform's dynamic PCDs\r
- #\r
- Pa.CollectPlatformDynamicPcds()\r
- Pa.CollectFixedAtBuildPcds()\r
- self.AutoGenObjectList.append(Pa)\r
-\r
- #\r
- # Generate Package level hash value\r
- #\r
- def GeneratePkgLevelHash(self):\r
- for Arch in self.ArchList:\r
- GlobalData.gPackageHash = {}\r
- if GlobalData.gUseHashCache:\r
- for Pkg in self.PkgSet[Arch]:\r
- self._GenPkgLevelHash(Pkg)\r
-\r
-\r
- def CreateBuildOptionsFile(self):\r
- #\r
- # Create BuildOptions Macro & PCD metafile, also add the Active Platform and FDF file.\r
- #\r
- content = 'gCommandLineDefines: '\r
- content += str(GlobalData.gCommandLineDefines)\r
- content += TAB_LINE_BREAK\r
- content += 'BuildOptionPcd: '\r
- content += str(GlobalData.BuildOptionPcd)\r
- content += TAB_LINE_BREAK\r
- content += 'Active Platform: '\r
- content += str(self.Platform)\r
- content += TAB_LINE_BREAK\r
- if self.FdfFile:\r
- content += 'Flash Image Definition: '\r
- content += str(self.FdfFile)\r
- content += TAB_LINE_BREAK\r
- SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)\r
-\r
- def CreatePcdTokenNumberFile(self):\r
- #\r
- # Create PcdToken Number file for Dynamic/DynamicEx Pcd.\r
- #\r
- PcdTokenNumber = 'PcdTokenNumber: '\r
- Pa = self.AutoGenObjectList[0]\r
- if Pa.PcdTokenNumber:\r
- if Pa.DynamicPcdList:\r
- for Pcd in Pa.DynamicPcdList:\r
- PcdTokenNumber += TAB_LINE_BREAK\r
- PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))\r
- PcdTokenNumber += ' : '\r
- PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName])\r
- SaveFileOnChange(os.path.join(self.BuildDir, 'PcdTokenNumber'), PcdTokenNumber, False)\r
-\r
- def CreateModuleHashInfo(self):\r
- #\r
- # Get set of workspace metafiles\r
- #\r
- AllWorkSpaceMetaFiles = self._GetMetaFiles(self.BuildTarget, self.ToolChain)\r
-\r
- #\r
- # Retrieve latest modified time of all metafiles\r
- #\r
- SrcTimeStamp = 0\r
- for f in AllWorkSpaceMetaFiles:\r
- if os.stat(f)[8] > SrcTimeStamp:\r
- SrcTimeStamp = os.stat(f)[8]\r
- self._SrcTimeStamp = SrcTimeStamp\r
-\r
- if GlobalData.gUseHashCache:\r
- m = hashlib.md5()\r
- for files in AllWorkSpaceMetaFiles:\r
- if files.endswith('.dec'):\r
- continue\r
- f = open(files, 'rb')\r
- Content = f.read()\r
- f.close()\r
- m.update(Content)\r
- SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), False)\r
- GlobalData.gPlatformHash = m.hexdigest()\r
-\r
- #\r
- # Write metafile list to build directory\r
- #\r
- AutoGenFilePath = os.path.join(self.BuildDir, 'AutoGen')\r
- if os.path.exists (AutoGenFilePath):\r
- os.remove(AutoGenFilePath)\r
- if not os.path.exists(self.BuildDir):\r
- os.makedirs(self.BuildDir)\r
- with open(os.path.join(self.BuildDir, 'AutoGen'), 'w+') as file:\r
- for f in AllWorkSpaceMetaFiles:\r
- print(f, file=file)\r
- return True\r
-\r
- def _GenPkgLevelHash(self, Pkg):\r
- if Pkg.PackageName in GlobalData.gPackageHash:\r
- return\r
-\r
- PkgDir = os.path.join(self.BuildDir, Pkg.Arch, Pkg.PackageName)\r
- CreateDirectory(PkgDir)\r
- HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')\r
- m = hashlib.md5()\r
- # Get .dec file's hash value\r
- f = open(Pkg.MetaFile.Path, 'rb')\r
- Content = f.read()\r
- f.close()\r
- m.update(Content)\r
- # Get include files hash value\r
- if Pkg.Includes:\r
- for inc in sorted(Pkg.Includes, key=lambda x: str(x)):\r
- for Root, Dirs, Files in os.walk(str(inc)):\r
- for File in sorted(Files):\r
- File_Path = os.path.join(Root, File)\r
- f = open(File_Path, 'rb')\r
- Content = f.read()\r
- f.close()\r
- m.update(Content)\r
- SaveFileOnChange(HashFile, m.hexdigest(), False)\r
- GlobalData.gPackageHash[Pkg.PackageName] = m.hexdigest()\r
-\r
- def _GetMetaFiles(self, Target, Toolchain):\r
- AllWorkSpaceMetaFiles = set()\r
- #\r
- # add fdf\r
- #\r
- if self.FdfFile:\r
- AllWorkSpaceMetaFiles.add (self.FdfFile.Path)\r
- for f in GlobalData.gFdfParser.GetAllIncludedFile():\r
- AllWorkSpaceMetaFiles.add (f.FileName)\r
- #\r
- # add dsc\r
- #\r
- AllWorkSpaceMetaFiles.add(self.MetaFile.Path)\r
-\r
- #\r
- # add build_rule.txt & tools_def.txt\r
- #\r
- AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultBuildRuleFile))\r
- AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultToolsDefFile))\r
-\r
- # add BuildOption metafile\r
- #\r
- AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'BuildOptions'))\r
-\r
- # add PcdToken Number file for Dynamic/DynamicEx Pcd\r
- #\r
- AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'PcdTokenNumber'))\r
-\r
- for Pa in self.AutoGenObjectList:\r
- AllWorkSpaceMetaFiles.add(Pa.ToolDefinitionFile)\r
-\r
- for Arch in self.ArchList:\r
- #\r
- # add dec\r
- #\r
- for Package in PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch).PackageList:\r
- AllWorkSpaceMetaFiles.add(Package.MetaFile.Path)\r
-\r
- #\r
- # add included dsc\r
- #\r
- for filePath in self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]._RawData.IncludedFiles:\r
- AllWorkSpaceMetaFiles.add(filePath.Path)\r
-\r
- return AllWorkSpaceMetaFiles\r
-\r
    ## Cross-check each DSC PCD's type against its declaring DEC.
    #
    # Hard error when the DSC uses a type under which no DEC declares the
    # PCD but some DEC declares it under another type; PCDs found in no DEC
    # at all produce a warning per arch instead.
    def _CheckPcdDefineAndType(self):
        PcdTypeSet = {TAB_PCDS_FIXED_AT_BUILD,
                      TAB_PCDS_PATCHABLE_IN_MODULE,
                      TAB_PCDS_FEATURE_FLAG,
                      TAB_PCDS_DYNAMIC,
                      TAB_PCDS_DYNAMIC_EX}

        # This dict store PCDs which are not used by any modules with specified arches
        UnusedPcd = OrderedDict()
        for Pa in self.AutoGenObjectList:
            # Key of DSC's Pcds dictionary is PcdCName, TokenSpaceGuid
            for Pcd in Pa.Platform.Pcds:
                PcdType = Pa.Platform.Pcds[Pcd].Type

                # If no PCD type, this PCD comes from FDF
                if not PcdType:
                    continue

                # Try to remove Hii and Vpd suffix
                if PcdType.startswith(TAB_PCDS_DYNAMIC_EX):
                    PcdType = TAB_PCDS_DYNAMIC_EX
                elif PcdType.startswith(TAB_PCDS_DYNAMIC):
                    PcdType = TAB_PCDS_DYNAMIC

                for Package in Pa.PackageList:
                    # Key of DEC's Pcds dictionary is PcdCName, TokenSpaceGuid, PcdType
                    if (Pcd[0], Pcd[1], PcdType) in Package.Pcds:
                        # Declared somewhere under the same type: OK.
                        break
                    for Type in PcdTypeSet:
                        if (Pcd[0], Pcd[1], Type) in Package.Pcds:
                            # Declared, but under a different type: hard error.
                            EdkLogger.error(
                                'build',
                                FORMAT_INVALID,
                                "Type [%s] of PCD [%s.%s] in DSC file doesn't match the type [%s] defined in DEC file." \
                                % (Pa.Platform.Pcds[Pcd].Type, Pcd[1], Pcd[0], Type),
                                ExtraData=None
                            )
                            return
                else:
                    # for/else on the Package loop: the PCD appeared in no DEC
                    # of this arch; remember it for the warning below.
                    UnusedPcd.setdefault(Pcd, []).append(Pa.Arch)

        for Pcd in UnusedPcd:
            EdkLogger.warn(
                'build',
                "The PCD was not specified by any INF module in the platform for the given architecture.\n"
                "\tPCD: [%s.%s]\n\tPlatform: [%s]\n\tArch: %s"
                % (Pcd[1], Pcd[0], os.path.basename(str(self.MetaFile)), str(UnusedPcd[Pcd])),
                ExtraData=None
            )
-\r
- def __repr__(self):\r
- return "%s [%s]" % (self.MetaFile, ", ".join(self.ArchList))\r
-\r
    ## Return the directory to store FV files
    #  (TAB_FV_DIRECTORY joined under the platform build directory)
    @cached_property
    def FvDir(self):
        return path.join(self.BuildDir, TAB_FV_DIRECTORY)
-\r
    ## Return the directory to store all intermediate and final files built
    #  (delegates to the first per-arch PlatformAutoGen, which computes it)
    @cached_property
    def BuildDir(self):
        return self.AutoGenObjectList[0].BuildDir
-\r
    ## Return the build output directory the platform DSC specifies
    @cached_property
    def OutputDir(self):
        return self.Platform.OutputDirectory
-\r
    ## Return platform name, as declared in the platform DSC
    @cached_property
    def Name(self):
        return self.Platform.PlatformName
-\r
    ## Return meta-file GUID of the active platform
    @cached_property
    def Guid(self):
        return self.Platform.Guid
-\r
-    ## Return platform version
-    #
-    #   @retval string      PLATFORM_VERSION value from the active DSC
-    @cached_property
-    def Version(self):
-        return self.Platform.Version
-\r
-    ## Return paths of tools
-    #
-    #   @retval dict        Tool definition of the first PlatformAutoGen
-    #                       (assumed identical for every arch -- TODO confirm)
-    @cached_property
-    def ToolDefinition(self):
-        return self.AutoGenObjectList[0].ToolDefinition
-\r
-    ## Return directory of platform makefile
-    #
-    # @retval     string  Makefile directory
-    #
-    # The top-level makefile lives directly in the platform build directory.
-    @cached_property
-    def MakeFileDir(self):
-        return self.BuildDir
-\r
-    ## Return build command string
-    #
-    # @retval     string  Build command string
-    #
-    # NOTE(review): PlatformAutoGen.BuildCommand may actually be a list of
-    # command arguments rather than a single string -- confirm against callers.
-    @cached_property
-    def BuildCommand(self):
-        # BuildCommand should be all the same. So just get one from platform AutoGen
-        return self.AutoGenObjectList[0].BuildCommand
-\r
-    ## Check the PCDs token value conflict in each DEC file.
-    #
-    # Will cause build break and raise error message while two PCDs conflict.
-    #
-    # Pass 1: within each package, sort PCDs by numeric token value and report
-    #         distinct PCDs in the same token space that share one token value
-    #         (aliases registered in GlobalData.MixedPcd are exempt).
-    # Pass 2: sort by Guid.CName and report the same PCD declared twice with
-    #         different token values.
-    #
-    # @return  None
-    #
-    def _CheckAllPcdsTokenValueConflict(self):
-        for Pa in self.AutoGenObjectList:
-            for Package in Pa.PackageList:
-                PcdList = list(Package.Pcds.values())
-                # base 0 lets TokenValue be decimal or 0x-prefixed hex
-                PcdList.sort(key=lambda x: int(x.TokenValue, 0))
-                Count = 0
-                while (Count < len(PcdList) - 1) :
-                    Item = PcdList[Count]
-                    ItemNext = PcdList[Count + 1]
-                    #
-                    # Make sure in the same token space the TokenValue should be unique
-                    #
-                    if (int(Item.TokenValue, 0) == int(ItemNext.TokenValue, 0)):
-                        SameTokenValuePcdList = []
-                        SameTokenValuePcdList.append(Item)
-                        SameTokenValuePcdList.append(ItemNext)
-                        # extend the run with every following PCD that still has
-                        # the same token value (list is sorted, so stop at first miss)
-                        RemainPcdListLength = len(PcdList) - Count - 2
-                        for ValueSameCount in range(RemainPcdListLength):
-                            if int(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount].TokenValue, 0) == int(Item.TokenValue, 0):
-                                SameTokenValuePcdList.append(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount])
-                            else:
-                                break;
-                        #
-                        # Sort same token value PCD list with TokenGuid and TokenCName
-                        #
-                        SameTokenValuePcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
-                        SameTokenValuePcdListCount = 0
-                        while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):
-                            Flag = False
-                            TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]
-                            TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1]
-
-                            # same token space but different CName -> conflict,
-                            # unless either name is a MixedPcd alias
-                            if (TemListItem.TokenSpaceGuidCName == TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName != TemListItemNext.TokenCName):
-                                for PcdItem in GlobalData.MixedPcd:
-                                    if (TemListItem.TokenCName, TemListItem.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem] or \
-                                       (TemListItemNext.TokenCName, TemListItemNext.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
-                                        Flag = True
-                                if not Flag:
-                                    EdkLogger.error(
-                                            'build',
-                                            FORMAT_INVALID,
-                                            "The TokenValue [%s] of PCD [%s.%s] is conflict with: [%s.%s] in %s"\
-                                            % (TemListItem.TokenValue, TemListItem.TokenSpaceGuidCName, TemListItem.TokenCName, TemListItemNext.TokenSpaceGuidCName, TemListItemNext.TokenCName, Package),
-                                            ExtraData=None
-                                            )
-                            SameTokenValuePcdListCount += 1
-                        # skip past the run of equal token values just examined
-                        Count += SameTokenValuePcdListCount
-                    Count += 1
-
-                PcdList = list(Package.Pcds.values())
-                PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
-                Count = 0
-                while (Count < len(PcdList) - 1) :
-                    Item = PcdList[Count]
-                    ItemNext = PcdList[Count + 1]
-                    #
-                    # Check PCDs with same TokenSpaceGuidCName.TokenCName have same token value as well.
-                    #
-                    if (Item.TokenSpaceGuidCName == ItemNext.TokenSpaceGuidCName) and (Item.TokenCName == ItemNext.TokenCName) and (int(Item.TokenValue, 0) != int(ItemNext.TokenValue, 0)):
-                        EdkLogger.error(
-                                    'build',
-                                    FORMAT_INVALID,
-                                    "The TokenValue [%s] of PCD [%s.%s] in %s defined in two places should be same as well."\
-                                    % (Item.TokenValue, Item.TokenSpaceGuidCName, Item.TokenCName, Package),
-                                    ExtraData=None
-                                    )
-                    Count += 1
-    ## Generate fds command
-    #
-    #   @retval string  GenFds command line, produced by expanding the
-    #                   top-level makefile template with its own dict
-    @property
-    def GenFdsCommand(self):
-        return (GenMake.TopLevelMakefile(self)._TEMPLATE_.Replace(GenMake.TopLevelMakefile(self)._TemplateDict)).strip()
-\r
-    ## Build the option dictionary consumed by the GenFds entry point
-    #
-    #   Gathers logging level, multi-thread/ignore-source switches, command
-    #   line PCD overrides, global macros and FDF/target/toolchain info.
-    #
-    #   @retval dict    Keyword options for GenFds
-    @property
-    def GenFdsCommandDict(self):
-        FdsCommandDict = {}
-        LogLevel = EdkLogger.GetLevel()
-        if LogLevel == EdkLogger.VERBOSE:
-            FdsCommandDict["verbose"] = True
-        elif LogLevel <= EdkLogger.DEBUG_9:
-            FdsCommandDict["debug"] = LogLevel - 1
-        elif LogLevel == EdkLogger.QUIET:
-            FdsCommandDict["quiet"] = True
-
-        if GlobalData.gEnableGenfdsMultiThread:
-            FdsCommandDict["GenfdsMultiThread"] = True
-        if GlobalData.gIgnoreSource:
-            FdsCommandDict["IgnoreSources"] = True
-
-        # PCD overrides given on the build command line
-        FdsCommandDict["OptionPcd"] = []
-        for pcd in GlobalData.BuildOptionPcd:
-            if pcd[2]:
-                pcdname = '.'.join(pcd[0:3])
-            else:
-                pcdname = '.'.join(pcd[0:2])
-            # byte-array values ('{...}') are quoted and tagged with 'H'
-            if pcd[3].startswith('{'):
-                FdsCommandDict["OptionPcd"].append(pcdname + '=' + 'H' + '"' + pcd[3] + '"')
-            else:
-                FdsCommandDict["OptionPcd"].append(pcdname + '=' + pcd[3])
-
-        MacroList = []
-        # macros passed to GenFds; command line defines override global ones
-        MacroDict = {}
-        MacroDict.update(GlobalData.gGlobalDefines)
-        MacroDict.update(GlobalData.gCommandLineDefines)
-        for MacroName in MacroDict:
-            if MacroDict[MacroName] != "":
-                MacroList.append('"%s=%s"' % (MacroName, MacroDict[MacroName].replace('\\', '\\\\')))
-            else:
-                MacroList.append('"%s"' % MacroName)
-        FdsCommandDict["macro"] = MacroList
-
-        FdsCommandDict["fdf_file"] = [self.FdfFile]
-        FdsCommandDict["build_target"] = self.BuildTarget
-        FdsCommandDict["toolchain_tag"] = self.ToolChain
-        FdsCommandDict["active_platform"] = str(self)
-
-        FdsCommandDict["conf_directory"] = GlobalData.gConfDirectory
-        FdsCommandDict["build_architecture_list"] = ','.join(self.ArchList)
-        FdsCommandDict["platform_build_directory"] = self.BuildDir
-
-        FdsCommandDict["fd"] = self.FdTargetList
-        FdsCommandDict["fv"] = self.FvTargetList
-        FdsCommandDict["cap"] = self.CapTargetList
-        return FdsCommandDict
-\r
-    ## Create makefile for the platform and modules in it
-    #
-    #   @param      CreateDepsMakeFile      Flag indicating if the makefile for
-    #                                       modules will be created as well
-    #
-    # No-op unless CreateDepsMakeFile is set; delegates to each per-arch
-    # PlatformAutoGen, which also triggers module makefile creation.
-    def CreateMakeFile(self, CreateDepsMakeFile=False):
-        if not CreateDepsMakeFile:
-            return
-        for Pa in self.AutoGenObjectList:
-            Pa.CreateMakeFile(True)
-\r
-    ## Create autogen code for platform and modules
-    #
-    #  Since there's no autogen code for platform, this method will do nothing
-    #  if CreateDepsCodeFile is set to False.
-    #
-    #  @param CreateDepsCodeFile      Flag indicating if creating module's
-    #                                 autogen code file or not
-    #
-    def CreateCodeFile(self, CreateDepsCodeFile=False):
-        if not CreateDepsCodeFile:
-            return
-        for Pa in self.AutoGenObjectList:
-            Pa.CreateCodeFile(True)
-\r
-    ## Create AsBuilt INF file the platform
-    #
-    # Intentionally a no-op at workspace level; per-module AsBuilt INFs are
-    # presumably generated by the module AutoGen objects -- TODO confirm.
-    def CreateAsBuiltInf(self):
-        return
-\r
-\r
-## AutoGen class for platform
-#
-# PlatformAutoGen class will process the original information in platform
-# file in order to generate makefile for platform.
-#
-class PlatformAutoGen(AutoGen):
-    # call super().__init__ then call the worker function with different parameter count
-    def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
-        # _Init guard makes construction idempotent: repeated instantiation of
-        # the same (file, target, toolchain, arch) skips re-initialization.
-        # NOTE(review): *args/**kwargs are accepted but unused here -- presumably
-        # consumed by the AutoGen base's instance-caching machinery; confirm.
-        if not hasattr(self, "_Init"):
-            self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
-            self._Init = True
-    #
-    # Used to store all PCDs for both PEI and DXE phase, in order to generate
-    # correct PCD database
-    #
-    # NOTE(review): these are mutable CLASS-level attributes, shared by every
-    # PlatformAutoGen instance unless shadowed on the instance -- confirm that
-    # callers rely on (or are safe against) this sharing.
-    _DynaPcdList_ = []
-    _NonDynaPcdList_ = []
-    _PlatformPcds = {}
-
-    #
-    # The priority list while override build option
-    #
-    # Key is a 5-bit mask (TARGET, TOOLCHAIN, ARCH, COMMANDTYPE, ATTRIBUTE):
-    # a '1' bit means that field is given explicitly, '*' (wildcard) otherwise.
-    # More specific keys map to higher priority values.
-    PrioList = {"0x11111" : 16,     #  TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
-                "0x01111" : 15,     #  ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
-                "0x10111" : 14,     #  TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
-                "0x00111" : 13,     #  ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
-                "0x11011" : 12,     #  TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
-                "0x01011" : 11,     #  ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
-                "0x10011" : 10,     #  TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
-                "0x00011" : 9,      #  ******_*********_****_COMMANDTYPE_ATTRIBUTE
-                "0x11101" : 8,      #  TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
-                "0x01101" : 7,      #  ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
-                "0x10101" : 6,      #  TARGET_*********_ARCH_***********_ATTRIBUTE
-                "0x00101" : 5,      #  ******_*********_ARCH_***********_ATTRIBUTE
-                "0x11001" : 4,      #  TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
-                "0x01001" : 3,      #  ******_TOOLCHAIN_****_***********_ATTRIBUTE
-                "0x10001" : 2,      #  TARGET_*********_****_***********_ATTRIBUTE
-                "0x00001" : 1}      #  ******_*********_****_***********_ATTRIBUTE (Lowest)
-\r
-    ## Initialize PlatformAutoGen
-    #
-    #
-    # @param      Workspace       WorkspaceAutoGen object
-    # @param      PlatformFile    Platform file (DSC file)
-    # @param      Target          Build target (DEBUG, RELEASE)
-    # @param      Toolchain       Name of tool chain
-    # @param      Arch            arch of the platform supports
-    #
-    # @retval     True            Always returns True on completion
-    #
-    def _InitWorker(self, Workspace, PlatformFile, Target, Toolchain, Arch):
-        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen platform [%s] [%s]" % (PlatformFile, Arch))
-        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (PlatformFile, Arch, Toolchain, Target)
-
-        self.MetaFile = PlatformFile
-        self.Workspace = Workspace
-        self.WorkspaceDir = Workspace.WorkspaceDir
-        self.ToolChain = Toolchain
-        self.BuildTarget = Target
-        self.Arch = Arch
-        self.SourceDir = PlatformFile.SubDir
-        self.FdTargetList = self.Workspace.FdTargetList
-        self.FvTargetList = self.Workspace.FvTargetList
-        # get the original module/package/platform objects
-        self.BuildDatabase = Workspace.BuildDatabase
-        self.DscBuildDataObj = Workspace.Platform
-
-        # flag indicating if the makefile/C-code file has been created or not
-        self.IsMakeFileCreated  = False
-
-        self._DynamicPcdList = None    # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
-        self._NonDynamicPcdList = None    # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
-
-        self._AsBuildInfList = []
-        self._AsBuildModuleList = []
-
-        self.VariableInfo = None
-
-        # Collect binary-only (as-built) modules listed in the FDF; source
-        # modules in the FDF profile are skipped here.
-        if GlobalData.gFdfParser is not None:
-            self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList
-            for Inf in self._AsBuildInfList:
-                InfClass = PathClass(NormPath(Inf), GlobalData.gWorkspace, self.Arch)
-                M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
-                if not M.IsBinaryModule:
-                    continue
-                self._AsBuildModuleList.append(InfClass)
-        # get library/modules for build
-        self.LibraryBuildDirectoryList = []
-        self.ModuleBuildDirectoryList = []
-
-        return True
-\r
-    ## hash() operator of PlatformAutoGen
-    #
-    #  The platform file path and arch string will be used to represent
-    #  hash value of this object
-    #
-    #  @retval   int Hash value of the platform file path and arch
-    #
-    # cached_class_function memoizes the result after the first call.
-    @cached_class_function
-    def __hash__(self):
-        return hash((self.MetaFile, self.Arch))
-\r
-    ## String representation: "<DSC path> [<arch>]"
-    @cached_class_function
-    def __repr__(self):
-        return "%s [%s]" % (self.MetaFile, self.Arch)
-\r
-    ## Create autogen code for platform and modules
-    #
-    #  Since there's no autogen code for platform, this method will do nothing
-    #  if CreateModuleCodeFile is set to False.
-    #
-    #  @param CreateModuleCodeFile    Flag indicating if creating module's
-    #                                 autogen code file or not
-    #
-    # cached_class_function ensures the module code files are generated at
-    # most once per PlatformAutoGen instance.
-    @cached_class_function
-    def CreateCodeFile(self, CreateModuleCodeFile=False):
-        # only module has code to be created, so do nothing if CreateModuleCodeFile is False
-        if not CreateModuleCodeFile:
-            return
-
-        for Ma in self.ModuleAutoGenList:
-            Ma.CreateCodeFile(True)
-\r
-    ## Generate Fds Command
-    #
-    #   @retval string  Delegates to the workspace-level GenFds command
-    @cached_property
-    def GenFdsCommand(self):
-        return self.Workspace.GenFdsCommand
-\r
-    ## Create makefile for the platform and modules in it
-    #
-    #  @param      CreateModuleMakeFile    Flag indicating if the makefile for
-    #                                      modules will be created as well
-    #  @param      FfsCommand              Map of (INF file, arch) to the FFS
-    #                                      generation command for that module
-    #
-    # NOTE(review): the mutable default {} for FfsCommand is shared across
-    # calls; it is only read here (membership test and lookup), so this is
-    # benign, but a None default would be safer if the code is ever changed.
-    def CreateMakeFile(self, CreateModuleMakeFile=False, FfsCommand = {}):
-        if CreateModuleMakeFile:
-            for Ma in self._MaList:
-                key = (Ma.MetaFile.File, self.Arch)
-                if key in FfsCommand:
-                    Ma.CreateMakeFile(True, FfsCommand[key])
-                else:
-                    Ma.CreateMakeFile(True)
-
-        # no need to create makefile for the platform more than once
-        if self.IsMakeFileCreated:
-            return
-
-        # create library/module build dirs for platform
-        Makefile = GenMake.PlatformMakefile(self)
-        self.LibraryBuildDirectoryList = Makefile.GetLibraryBuildDirectoryList()
-        self.ModuleBuildDirectoryList = Makefile.GetModuleBuildDirectoryList()
-
-        self.IsMakeFileCreated = True
-\r
-    ## All PCDs of the platform: dynamic entries first, then non-dynamic.
-    @property
-    def AllPcdList(self):
-        return self.DynamicPcdList + self.NonDynamicPcdList
-    ## Deal with Shared FixedAtBuild Pcds
-    #
-    # For each library instance, find FixedAtBuild PCDs whose value is the
-    # same across every referencing module and record them in
-    # LibAuto.ConstPcd so they can be treated as compile-time constants.
-    #
-    def CollectFixedAtBuildPcds(self):
-        for LibAuto in self.LibraryAutoGenList:
-            # key "Guid.CName" -> value seen; second dict tracks whether that
-            # value was identical for every referencing module
-            FixedAtBuildPcds = {}
-            ShareFixedAtBuildPcdsSameValue = {}
-            for Module in LibAuto.ReferenceModules:
-                for Pcd in set(Module.FixedAtBuildPcds + LibAuto.FixedAtBuildPcds):
-                    DefaultValue = Pcd.DefaultValue
-                    # Cover the case: DSC component override the Pcd value and the Pcd only used in one Lib
-                    if Pcd in Module.LibraryPcdList:
-                        Index = Module.LibraryPcdList.index(Pcd)
-                        DefaultValue = Module.LibraryPcdList[Index].DefaultValue
-                    key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
-                    if key not in FixedAtBuildPcds:
-                        ShareFixedAtBuildPcdsSameValue[key] = True
-                        FixedAtBuildPcds[key] = DefaultValue
-                    else:
-                        if FixedAtBuildPcds[key] != DefaultValue:
-                            ShareFixedAtBuildPcdsSameValue[key] = False
-            for Pcd in LibAuto.FixedAtBuildPcds:
-                key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
-                # only PCDs the DSC also declares as FixedAtBuild qualify
-                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
-                    continue
-                else:
-                    DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)]
-                    if DscPcd.Type != TAB_PCDS_FIXED_AT_BUILD:
-                        continue
-                if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
-                    LibAuto.ConstPcd[key] = FixedAtBuildPcds[key]
-\r
-    ## Collect HII variable information for the given dynamic PCD set
-    #
-    #   @param  DynamicPcdSet   Iterable of dynamic PCD objects to scan
-    #   @retval VariableMgr     Manager holding one var_info entry per
-    #                           (PCD, SKU, default store) combination
-    #
-    def CollectVariables(self, DynamicPcdSet):
-        VpdRegionSize = 0
-        VpdRegionBase = 0
-        if self.Workspace.FdfFile:
-            FdDict = self.Workspace.FdfProfile.FdDict[GlobalData.gFdfParser.CurrentFdName]
-            for FdRegion in FdDict.RegionList:
-                for item in FdRegion.RegionDataList:
-                    if self.Platform.VpdToolGuid.strip() and self.Platform.VpdToolGuid in item:
-                        VpdRegionSize = FdRegion.Size
-                        VpdRegionBase = FdRegion.Offset
-                        break
-                    # NOTE(review): this break only exits the inner
-                    # RegionDataList loop; the FdRegion loop continues, so a
-                    # later region could overwrite the values -- confirm intent.
-
-        VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(), self.DscBuildDataObj.SkuIds)
-        VariableInfo.SetVpdRegionMaxSize(VpdRegionSize)
-        VariableInfo.SetVpdRegionOffset(VpdRegionBase)
-        Index = 0
-        for Pcd in DynamicPcdSet:
-            pcdname = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
-            for SkuName in Pcd.SkuInfoList:
-                Sku = Pcd.SkuInfoList[SkuName]
-                SkuId = Sku.SkuId
-                if SkuId is None or SkuId == '':
-                    continue
-                # only HII (variable-backed) SKUs are collected, and only
-                # when the variable is non-volatile (or has no attributes)
-                if len(Sku.VariableName) > 0:
-                    if Sku.VariableAttribute and 'NV' not in Sku.VariableAttribute:
-                        continue
-                    VariableGuidStructure = Sku.VariableGuidValue
-                    VariableGuid = GuidStructureStringToGuidString(VariableGuidStructure)
-                    for StorageName in Sku.DefaultStoreDict:
-                        VariableInfo.append_variable(var_info(Index, pcdname, StorageName, SkuName, StringToArray(Sku.VariableName), VariableGuid, Sku.VariableOffset, Sku.VariableAttribute, Sku.HiiDefaultValue, Sku.DefaultStoreDict[StorageName] if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES else StringToArray(Sku.DefaultStoreDict[StorageName]), Pcd.DatumType, Pcd.CustomAttribute['DscPosition'], Pcd.CustomAttribute.get('IsStru',False)))
-            Index += 1
-        return VariableInfo
-\r
-    ## Patch the max size of the PcdNvStoreDefaultValueBuffer PCD
-    #
-    #   Reads the VPD map file to locate the NV store offset, derives the
-    #   remaining space in the VPD region and patches the default-value
-    #   buffer (collapsing its SKU list to the single DEFAULT SKU).
-    #
-    #   @param  OrgVpdFile  VpdInfoFile object used to read PCD offsets
-    #   @retval OrgVpdFile  The same object, after any in-place updates
-    #
-    def UpdateNVStoreMaxSize(self, OrgVpdFile):
-        if self.VariableInfo:
-            VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)
-            PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]
-
-            if PcdNvStoreDfBuffer:
-                if os.path.exists(VpdMapFilePath):
-                    OrgVpdFile.Read(VpdMapFilePath)
-                    PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])
-                    NvStoreOffset = list(PcdItems.values())[0].strip() if PcdItems else '0'
-                else:
-                    # NOTE(review): if this error does not raise, NvStoreOffset
-                    # is unbound on the next line -- presumably EdkLogger.error
-                    # aborts the build; confirm.
-                    EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
-
-                NvStoreOffset = int(NvStoreOffset, 16) if NvStoreOffset.upper().startswith("0X") else int(NvStoreOffset)
-                default_skuobj = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)
-                maxsize = self.VariableInfo.VpdRegionSize - NvStoreOffset if self.VariableInfo.VpdRegionSize else len(default_skuobj.DefaultValue.split(","))
-                var_data = self.VariableInfo.PatchNVStoreDefaultMaxSize(maxsize)
-
-                if var_data and default_skuobj:
-                    default_skuobj.DefaultValue = var_data
-                    PcdNvStoreDfBuffer[0].DefaultValue = var_data
-                    PcdNvStoreDfBuffer[0].SkuInfoList.clear()
-                    PcdNvStoreDfBuffer[0].SkuInfoList[TAB_DEFAULT] = default_skuobj
-                    PcdNvStoreDfBuffer[0].MaxDatumSize = str(len(default_skuobj.DefaultValue.split(",")))
-
-        return OrgVpdFile
-\r
- ## Collect dynamic PCDs\r
- #\r
- # Gather dynamic PCDs list from each module and their settings from platform\r
- # This interface should be invoked explicitly when platform action is created.\r
- #\r
- def CollectPlatformDynamicPcds(self):\r
- for key in self.Platform.Pcds:\r
- for SinglePcd in GlobalData.MixedPcd:\r
- if (self.Platform.Pcds[key].TokenCName, self.Platform.Pcds[key].TokenSpaceGuidCName) == SinglePcd:\r
- for item in GlobalData.MixedPcd[SinglePcd]:\r
- Pcd_Type = item[0].split('_')[-1]\r
- if (Pcd_Type == self.Platform.Pcds[key].Type) or (Pcd_Type == TAB_PCDS_DYNAMIC_EX and self.Platform.Pcds[key].Type in PCD_DYNAMIC_EX_TYPE_SET) or \\r
- (Pcd_Type == TAB_PCDS_DYNAMIC and self.Platform.Pcds[key].Type in PCD_DYNAMIC_TYPE_SET):\r
- Value = self.Platform.Pcds[key]\r
- Value.TokenCName = self.Platform.Pcds[key].TokenCName + '_' + Pcd_Type\r
- if len(key) == 2:\r
- newkey = (Value.TokenCName, key[1])\r
- elif len(key) == 3:\r
- newkey = (Value.TokenCName, key[1], key[2])\r
- del self.Platform.Pcds[key]\r
- self.Platform.Pcds[newkey] = Value\r
- break\r
- break\r
-\r
- # for gathering error information\r
- NoDatumTypePcdList = set()\r
- FdfModuleList = []\r
- for InfName in self._AsBuildInfList:\r
- InfName = mws.join(self.WorkspaceDir, InfName)\r
- FdfModuleList.append(os.path.normpath(InfName))\r
- for M in self._MaList:\r
-# M is the ModuleAutoGen object for each module to be built
- for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:\r
- # make sure that the "VOID*" kind of datum has MaxDatumSize set\r
- if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:\r
- NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, M.MetaFile))\r
-\r
- # Check the PCD from Binary INF or Source INF\r
- if M.IsBinaryModule == True:\r
- PcdFromModule.IsFromBinaryInf = True\r
-\r
- # Check the PCD from DSC or not\r
- PcdFromModule.IsFromDsc = (PcdFromModule.TokenCName, PcdFromModule.TokenSpaceGuidCName) in self.Platform.Pcds\r
-\r
- if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET or PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:\r
- if M.MetaFile.Path not in FdfModuleList:\r
- # If one of the Source built modules listed in the DSC is not listed\r
- # in FDF modules, and the INF lists a PCD can only use the PcdsDynamic\r
- # access method (it is only listed in the DEC file that declares the\r
- # PCD as PcdsDynamic), then build tool will report warning message\r
- # notify the PI that they are attempting to build a module that must\r
- # be included in a flash image in order to be functional. These Dynamic\r
- # PCD will not be added into the Database unless it is used by other\r
- # modules that are included in the FDF file.\r
- if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET and \\r
- PcdFromModule.IsFromBinaryInf == False:\r
-                        # Print warning message to let the developer make a determination.
- continue\r
- # If one of the Source built modules listed in the DSC is not listed in\r
- # FDF modules, and the INF lists a PCD can only use the PcdsDynamicEx\r
- # access method (it is only listed in the DEC file that declares the\r
- # PCD as PcdsDynamicEx), then DO NOT break the build; DO NOT add the\r
- # PCD to the Platform's PCD Database.\r
- if PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:\r
- continue\r
- #\r
-                # If a dynamic PCD used by a PEIM module/PEI module & DXE module,
- # it should be stored in Pcd PEI database, If a dynamic only\r
- # used by DXE module, it should be stored in DXE PCD database.\r
- # The default Phase is DXE\r
- #\r
- if M.ModuleType in SUP_MODULE_SET_PEI:\r
- PcdFromModule.Phase = "PEI"\r
- if PcdFromModule not in self._DynaPcdList_:\r
- self._DynaPcdList_.append(PcdFromModule)\r
- elif PcdFromModule.Phase == 'PEI':\r
- # overwrite any the same PCD existing, if Phase is PEI\r
- Index = self._DynaPcdList_.index(PcdFromModule)\r
- self._DynaPcdList_[Index] = PcdFromModule\r
- elif PcdFromModule not in self._NonDynaPcdList_:\r
- self._NonDynaPcdList_.append(PcdFromModule)\r
- elif PcdFromModule in self._NonDynaPcdList_ and PcdFromModule.IsFromBinaryInf == True:\r
- Index = self._NonDynaPcdList_.index(PcdFromModule)\r
- if self._NonDynaPcdList_[Index].IsFromBinaryInf == False:\r
- #The PCD from Binary INF will override the same one from source INF\r
- self._NonDynaPcdList_.remove (self._NonDynaPcdList_[Index])\r
- PcdFromModule.Pending = False\r
- self._NonDynaPcdList_.append (PcdFromModule)\r
- DscModuleSet = {os.path.normpath(ModuleInf.Path) for ModuleInf in self.Platform.Modules}\r
- # add the PCD from modules that listed in FDF but not in DSC to Database\r
- for InfName in FdfModuleList:\r
- if InfName not in DscModuleSet:\r
- InfClass = PathClass(InfName)\r
- M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]\r
- # If a module INF in FDF but not in current arch's DSC module list, it must be module (either binary or source)\r
- # for different Arch. PCDs in source module for different Arch is already added before, so skip the source module here.\r
- # For binary module, if in current arch, we need to list the PCDs into database.\r
- if not M.IsBinaryModule:\r
- continue\r
- # Override the module PCD setting by platform setting\r
- ModulePcdList = self.ApplyPcdSetting(M, M.Pcds)\r
- for PcdFromModule in ModulePcdList:\r
- PcdFromModule.IsFromBinaryInf = True\r
- PcdFromModule.IsFromDsc = False\r
- # Only allow the DynamicEx and Patchable PCD in AsBuild INF\r
- if PcdFromModule.Type not in PCD_DYNAMIC_EX_TYPE_SET and PcdFromModule.Type not in TAB_PCDS_PATCHABLE_IN_MODULE:\r
- EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",\r
- File=self.MetaFile,\r
- ExtraData="\n\tExisted %s PCD %s in:\n\t\t%s\n"\r
- % (PcdFromModule.Type, PcdFromModule.TokenCName, InfName))\r
- # make sure that the "VOID*" kind of datum has MaxDatumSize set\r
- if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:\r
- NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, InfName))\r
- if M.ModuleType in SUP_MODULE_SET_PEI:\r
- PcdFromModule.Phase = "PEI"\r
- if PcdFromModule not in self._DynaPcdList_ and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:\r
- self._DynaPcdList_.append(PcdFromModule)\r
- elif PcdFromModule not in self._NonDynaPcdList_ and PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE:\r
- self._NonDynaPcdList_.append(PcdFromModule)\r
- if PcdFromModule in self._DynaPcdList_ and PcdFromModule.Phase == 'PEI' and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:\r
- # Overwrite the phase of any the same PCD existing, if Phase is PEI.\r
-                # It is to solve the case that a dynamic PCD used by a PEIM module/PEI
- # module & DXE module at a same time.\r
- # Overwrite the type of the PCDs in source INF by the type of AsBuild\r
- # INF file as DynamicEx.\r
- Index = self._DynaPcdList_.index(PcdFromModule)\r
- self._DynaPcdList_[Index].Phase = PcdFromModule.Phase\r
- self._DynaPcdList_[Index].Type = PcdFromModule.Type\r
- for PcdFromModule in self._NonDynaPcdList_:\r
- # If a PCD is not listed in the DSC file, but binary INF files used by\r
- # this platform all (that use this PCD) list the PCD in a [PatchPcds]\r
- # section, AND all source INF files used by this platform the build\r
- # that use the PCD list the PCD in either a [Pcds] or [PatchPcds]\r
- # section, then the tools must NOT add the PCD to the Platform's PCD\r
- # Database; the build must assign the access method for this PCD as\r
- # PcdsPatchableInModule.\r
- if PcdFromModule not in self._DynaPcdList_:\r
- continue\r
- Index = self._DynaPcdList_.index(PcdFromModule)\r
- if PcdFromModule.IsFromDsc == False and \\r
- PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE and \\r
- PcdFromModule.IsFromBinaryInf == True and \\r
- self._DynaPcdList_[Index].IsFromBinaryInf == False:\r
- Index = self._DynaPcdList_.index(PcdFromModule)\r
- self._DynaPcdList_.remove (self._DynaPcdList_[Index])\r
-\r
- # print out error information and break the build, if error found\r
- if len(NoDatumTypePcdList) > 0:\r
- NoDatumTypePcdListString = "\n\t\t".join(NoDatumTypePcdList)\r
- EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",\r
- File=self.MetaFile,\r
- ExtraData="\n\tPCD(s) without MaxDatumSize:\n\t\t%s\n"\r
- % NoDatumTypePcdListString)\r
- self._NonDynamicPcdList = self._NonDynaPcdList_\r
- self._DynamicPcdList = self._DynaPcdList_\r
- #\r
- # Sort dynamic PCD list to:\r
- # 1) If PCD's datum type is VOID* and value is unicode string which starts with L, the PCD item should\r
- # try to be put header of dynamicd List\r
- # 2) If PCD is HII type, the PCD item should be put after unicode type PCD\r
- #\r
- # The reason of sorting is make sure the unicode string is in double-byte alignment in string table.\r
- #\r
- UnicodePcdArray = set()\r
- HiiPcdArray = set()\r
- OtherPcdArray = set()\r
- VpdPcdDict = {}\r
- VpdFile = VpdInfoFile.VpdInfoFile()\r
- NeedProcessVpdMapFile = False\r
-\r
- for pcd in self.Platform.Pcds:\r
- if pcd not in self._PlatformPcds:\r
- self._PlatformPcds[pcd] = self.Platform.Pcds[pcd]\r
-\r
- for item in self._PlatformPcds:\r
- if self._PlatformPcds[item].DatumType and self._PlatformPcds[item].DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r
- self._PlatformPcds[item].DatumType = TAB_VOID\r
-\r
- if (self.Workspace.ArchList[-1] == self.Arch):\r
- for Pcd in self._DynamicPcdList:\r
-                # just pick a value to determine whether it is a unicode string type
- Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)\r
- Sku.VpdOffset = Sku.VpdOffset.strip()\r
-\r
- if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r
- Pcd.DatumType = TAB_VOID\r
-\r
-                # if a PCD whose datum value is a unicode string is found, insert it to the left side of UnicodeIndex
- # if found HII type PCD then insert to right of UnicodeIndex\r
- if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:\r
- VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd\r
-\r
- #Collect DynamicHii PCD values and assign it to DynamicExVpd PCD gEfiMdeModulePkgTokenSpaceGuid.PcdNvStoreDefaultValueBuffer\r
- PcdNvStoreDfBuffer = VpdPcdDict.get(("PcdNvStoreDefaultValueBuffer", "gEfiMdeModulePkgTokenSpaceGuid"))\r
- if PcdNvStoreDfBuffer:\r
- self.VariableInfo = self.CollectVariables(self._DynamicPcdList)\r
- vardump = self.VariableInfo.dump()\r
- if vardump:\r
- #\r
- #According to PCD_DATABASE_INIT in edk2\MdeModulePkg\Include\Guid\PcdDataBaseSignatureGuid.h,\r
- #the max size for string PCD should not exceed USHRT_MAX 65535(0xffff).\r
- #typedef UINT16 SIZE_INFO;\r
- #//SIZE_INFO SizeTable[];\r
- if len(vardump.split(",")) > 0xffff:\r
- EdkLogger.error("build", RESOURCE_OVERFLOW, 'The current length of PCD %s value is %d, it exceeds to the max size of String PCD.' %(".".join([PcdNvStoreDfBuffer.TokenSpaceGuidCName,PcdNvStoreDfBuffer.TokenCName]) ,len(vardump.split(","))))\r
- PcdNvStoreDfBuffer.DefaultValue = vardump\r
- for skuname in PcdNvStoreDfBuffer.SkuInfoList:\r
- PcdNvStoreDfBuffer.SkuInfoList[skuname].DefaultValue = vardump\r
- PcdNvStoreDfBuffer.MaxDatumSize = str(len(vardump.split(",")))\r
- else:\r
-                #If the end user defines [DefaultStores] and [XXX.Manufacturing] in the DSC, but forgets to configure PcdNvStoreDefaultValueBuffer as PcdsDynamicExVpd
- if [Pcd for Pcd in self._DynamicPcdList if Pcd.UserDefinedDefaultStoresFlag]:\r
- EdkLogger.warn("build", "PcdNvStoreDefaultValueBuffer should be defined as PcdsDynamicExVpd in dsc file since the DefaultStores is enabled for this platform.\n%s" %self.Platform.MetaFile.Path)\r
- PlatformPcds = sorted(self._PlatformPcds.keys())\r
- #\r
- # Add VPD type PCD into VpdFile and determine whether the VPD PCD need to be fixed up.\r
- #\r
- VpdSkuMap = {}\r
- for PcdKey in PlatformPcds:\r
- Pcd = self._PlatformPcds[PcdKey]\r
- if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD] and \\r
- PcdKey in VpdPcdDict:\r
- Pcd = VpdPcdDict[PcdKey]\r
- SkuValueMap = {}\r
- DefaultSku = Pcd.SkuInfoList.get(TAB_DEFAULT)\r
- if DefaultSku:\r
- PcdValue = DefaultSku.DefaultValue\r
- if PcdValue not in SkuValueMap:\r
- SkuValueMap[PcdValue] = []\r
- VpdFile.Add(Pcd, TAB_DEFAULT, DefaultSku.VpdOffset)\r
- SkuValueMap[PcdValue].append(DefaultSku)\r
-\r
- for (SkuName, Sku) in Pcd.SkuInfoList.items():\r
- Sku.VpdOffset = Sku.VpdOffset.strip()\r
- PcdValue = Sku.DefaultValue\r
- if PcdValue == "":\r
- PcdValue = Pcd.DefaultValue\r
- if Sku.VpdOffset != TAB_STAR:\r
- if PcdValue.startswith("{"):\r
- Alignment = 8\r
- elif PcdValue.startswith("L"):\r
- Alignment = 2\r
- else:\r
- Alignment = 1\r
- try:\r
- VpdOffset = int(Sku.VpdOffset)\r
- except:\r
- try:\r
- VpdOffset = int(Sku.VpdOffset, 16)\r
- except:\r
- EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))\r
- if VpdOffset % Alignment != 0:\r
- if PcdValue.startswith("{"):\r
- EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName), File=self.MetaFile)\r
- else:\r
- EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Alignment))\r
- if PcdValue not in SkuValueMap:\r
- SkuValueMap[PcdValue] = []\r
- VpdFile.Add(Pcd, SkuName, Sku.VpdOffset)\r
- SkuValueMap[PcdValue].append(Sku)\r
- # if the offset of a VPD is *, then it need to be fixed up by third party tool.\r
- if not NeedProcessVpdMapFile and Sku.VpdOffset == TAB_STAR:\r
- NeedProcessVpdMapFile = True\r
- if self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == '':\r
- EdkLogger.error("Build", FILE_NOT_FOUND, \\r
- "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r
-\r
- VpdSkuMap[PcdKey] = SkuValueMap\r
- #\r
- # Fix the PCDs define in VPD PCD section that never referenced by module.\r
- # An example is PCD for signature usage.\r
- #\r
- for DscPcd in PlatformPcds:\r
- DscPcdEntry = self._PlatformPcds[DscPcd]\r
- if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:\r
- if not (self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == ''):\r
- FoundFlag = False\r
- for VpdPcd in VpdFile._VpdArray:\r
- # This PCD has been referenced by module\r
- if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \\r
- (VpdPcd.TokenCName == DscPcdEntry.TokenCName):\r
- FoundFlag = True\r
-\r
- # Not found, it should be signature\r
- if not FoundFlag :\r
-                    # just pick a value to determine whether it is a unicode string type
- SkuValueMap = {}\r
- SkuObjList = list(DscPcdEntry.SkuInfoList.items())\r
- DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)\r
- if DefaultSku:\r
- defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))\r
- SkuObjList[0], SkuObjList[defaultindex] = SkuObjList[defaultindex], SkuObjList[0]\r
- for (SkuName, Sku) in SkuObjList:\r
- Sku.VpdOffset = Sku.VpdOffset.strip()\r
-\r
- # Need to iterate DEC pcd information to get the value & datumtype\r
- for eachDec in self.PackageList:\r
- for DecPcd in eachDec.Pcds:\r
- DecPcdEntry = eachDec.Pcds[DecPcd]\r
- if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \\r
- (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):\r
- # Print warning message to let the developer make a determine.\r
- EdkLogger.warn("build", "Unreferenced vpd pcd used!",\r
- File=self.MetaFile, \\r
- ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \\r
- %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))\r
-\r
- DscPcdEntry.DatumType = DecPcdEntry.DatumType\r
- DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue\r
- DscPcdEntry.TokenValue = DecPcdEntry.TokenValue\r
- DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]\r
- # Only fix the value while no value provided in DSC file.\r
- if not Sku.DefaultValue:\r
- DscPcdEntry.SkuInfoList[list(DscPcdEntry.SkuInfoList.keys())[0]].DefaultValue = DecPcdEntry.DefaultValue\r
-\r
- if DscPcdEntry not in self._DynamicPcdList:\r
- self._DynamicPcdList.append(DscPcdEntry)\r
- Sku.VpdOffset = Sku.VpdOffset.strip()\r
- PcdValue = Sku.DefaultValue\r
- if PcdValue == "":\r
- PcdValue = DscPcdEntry.DefaultValue\r
- if Sku.VpdOffset != TAB_STAR:\r
- if PcdValue.startswith("{"):\r
- Alignment = 8\r
- elif PcdValue.startswith("L"):\r
- Alignment = 2\r
- else:\r
- Alignment = 1\r
- try:\r
- VpdOffset = int(Sku.VpdOffset)\r
- except:\r
- try:\r
- VpdOffset = int(Sku.VpdOffset, 16)\r
- except:\r
- EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName))\r
- if VpdOffset % Alignment != 0:\r
- if PcdValue.startswith("{"):\r
- EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName), File=self.MetaFile)\r
- else:\r
- EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, Alignment))\r
- if PcdValue not in SkuValueMap:\r
- SkuValueMap[PcdValue] = []\r
- VpdFile.Add(DscPcdEntry, SkuName, Sku.VpdOffset)\r
- SkuValueMap[PcdValue].append(Sku)\r
- if not NeedProcessVpdMapFile and Sku.VpdOffset == TAB_STAR:\r
- NeedProcessVpdMapFile = True\r
- if DscPcdEntry.DatumType == TAB_VOID and PcdValue.startswith("L"):\r
- UnicodePcdArray.add(DscPcdEntry)\r
- elif len(Sku.VariableName) > 0:\r
- HiiPcdArray.add(DscPcdEntry)\r
- else:\r
- OtherPcdArray.add(DscPcdEntry)\r
-\r
- # if the offset of a VPD is *, then it need to be fixed up by third party tool.\r
- VpdSkuMap[DscPcd] = SkuValueMap\r
- if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \\r
- VpdFile.GetCount() != 0:\r
- EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,\r
- "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))\r
-\r
- if VpdFile.GetCount() != 0:\r
-\r
- self.FixVpdOffset(VpdFile)\r
-\r
- self.FixVpdOffset(self.UpdateNVStoreMaxSize(VpdFile))\r
- PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]\r
- if PcdNvStoreDfBuffer:\r
- PcdName,PcdGuid = PcdNvStoreDfBuffer[0].TokenCName, PcdNvStoreDfBuffer[0].TokenSpaceGuidCName\r
- if (PcdName,PcdGuid) in VpdSkuMap:\r
- DefaultSku = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)\r
- VpdSkuMap[(PcdName,PcdGuid)] = {DefaultSku.DefaultValue:[SkuObj for SkuObj in PcdNvStoreDfBuffer[0].SkuInfoList.values() ]}\r
-\r
- # Process VPD map file generated by third party BPDG tool\r
- if NeedProcessVpdMapFile:\r
- VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)\r
- if os.path.exists(VpdMapFilePath):\r
- VpdFile.Read(VpdMapFilePath)\r
-\r
- # Fixup TAB_STAR offset\r
- for pcd in VpdSkuMap:\r
- vpdinfo = VpdFile.GetVpdInfo(pcd)\r
- if vpdinfo is None:\r
- # just pick the a value to determine whether is unicode string type\r
- continue\r
- for pcdvalue in VpdSkuMap[pcd]:\r
- for sku in VpdSkuMap[pcd][pcdvalue]:\r
- for item in vpdinfo:\r
- if item[2] == pcdvalue:\r
- sku.VpdOffset = item[1]\r
- else:\r
- EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)\r
-\r
- # Delete the DynamicPcdList At the last time enter into this function\r
- for Pcd in self._DynamicPcdList:\r
- # just pick the a value to determine whether is unicode string type\r
- Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)\r
- Sku.VpdOffset = Sku.VpdOffset.strip()\r
-\r
- if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r
- Pcd.DatumType = TAB_VOID\r
-\r
- PcdValue = Sku.DefaultValue\r
- if Pcd.DatumType == TAB_VOID and PcdValue.startswith("L"):\r
- # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex\r
- UnicodePcdArray.add(Pcd)\r
- elif len(Sku.VariableName) > 0:\r
- # if found HII type PCD then insert to right of UnicodeIndex\r
- HiiPcdArray.add(Pcd)\r
- else:\r
- OtherPcdArray.add(Pcd)\r
- del self._DynamicPcdList[:]\r
- self._DynamicPcdList.extend(list(UnicodePcdArray))\r
- self._DynamicPcdList.extend(list(HiiPcdArray))\r
- self._DynamicPcdList.extend(list(OtherPcdArray))\r
- allskuset = [(SkuName, Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName, Sku) in pcd.SkuInfoList.items()]\r
- for pcd in self._DynamicPcdList:\r
- if len(pcd.SkuInfoList) == 1:\r
- for (SkuName, SkuId) in allskuset:\r
- if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:\r
- continue\r
- pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])\r
- pcd.SkuInfoList[SkuName].SkuId = SkuId\r
- pcd.SkuInfoList[SkuName].SkuIdName = SkuName\r
-\r
- def FixVpdOffset(self, VpdFile ):\r
- FvPath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY)\r
- if not os.path.exists(FvPath):\r
- try:\r
- os.makedirs(FvPath)\r
- except:\r
- EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)\r
-\r
- VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)\r
-\r
- if VpdFile.Write(VpdFilePath):\r
- # retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.\r
- BPDGToolName = None\r
- for ToolDef in self.ToolDefinition.values():\r
- if TAB_GUID in ToolDef and ToolDef[TAB_GUID] == self.Platform.VpdToolGuid:\r
- if "PATH" not in ToolDef:\r
- EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)\r
- BPDGToolName = ToolDef["PATH"]\r
- break\r
- # Call third party GUID BPDG tool.\r
- if BPDGToolName is not None:\r
- VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)\r
- else:\r
- EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r
-\r
- ## Return the platform build data object\r
- @cached_property\r
- def Platform(self):\r
- return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
-\r
- ## Return platform name\r
- @cached_property\r
- def Name(self):\r
- return self.Platform.PlatformName\r
-\r
- ## Return the meta file GUID\r
- @cached_property\r
- def Guid(self):\r
- return self.Platform.Guid\r
-\r
- ## Return the platform version\r
- @cached_property\r
- def Version(self):\r
- return self.Platform.Version\r
-\r
- ## Return the FDF file name\r
- @cached_property\r
- def FdfFile(self):\r
- if self.Workspace.FdfFile:\r
- RetVal= mws.join(self.WorkspaceDir, self.Workspace.FdfFile)\r
- else:\r
- RetVal = ''\r
- return RetVal\r
-\r
- ## Return the build output directory platform specifies\r
- @cached_property\r
- def OutputDir(self):\r
- return self.Platform.OutputDirectory\r
-\r
- ## Return the directory to store all intermediate and final files built\r
- @cached_property\r
- def BuildDir(self):\r
- if os.path.isabs(self.OutputDir):\r
- GlobalData.gBuildDirectory = RetVal = path.join(\r
- path.abspath(self.OutputDir),\r
- self.BuildTarget + "_" + self.ToolChain,\r
- )\r
- else:\r
- GlobalData.gBuildDirectory = RetVal = path.join(\r
- self.WorkspaceDir,\r
- self.OutputDir,\r
- self.BuildTarget + "_" + self.ToolChain,\r
- )\r
- return RetVal\r
-\r
- ## Return directory of platform makefile\r
- #\r
- # @retval string Makefile directory\r
- #\r
- @cached_property\r
- def MakeFileDir(self):\r
- return path.join(self.BuildDir, self.Arch)\r
-\r
- ## Return build command string\r
- #\r
- # @retval string Build command string\r
- #\r
- @cached_property\r
- def BuildCommand(self):\r
- RetVal = []\r
- if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:\r
- RetVal += _SplitOption(self.ToolDefinition["MAKE"]["PATH"])\r
- if "FLAGS" in self.ToolDefinition["MAKE"]:\r
- NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip()\r
- if NewOption != '':\r
- RetVal += _SplitOption(NewOption)\r
- if "MAKE" in self.EdkIIBuildOption:\r
- if "FLAGS" in self.EdkIIBuildOption["MAKE"]:\r
- Flags = self.EdkIIBuildOption["MAKE"]["FLAGS"]\r
- if Flags.startswith('='):\r
- RetVal = [RetVal[0]] + [Flags[1:]]\r
- else:\r
- RetVal.append(Flags)\r
- return RetVal\r
-\r
- ## Get tool chain definition\r
- #\r
- # Get each tool definition for given tool chain from tools_def.txt and platform\r
- #\r
- @cached_property\r
- def ToolDefinition(self):\r
- ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary\r
- if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:\r
- EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",\r
- ExtraData="[%s]" % self.MetaFile)\r
- RetVal = OrderedDict()\r
- DllPathList = set()\r
- for Def in ToolDefinition:\r
- Target, Tag, Arch, Tool, Attr = Def.split("_")\r
- if Target != self.BuildTarget or Tag != self.ToolChain or Arch != self.Arch:\r
- continue\r
-\r
- Value = ToolDefinition[Def]\r
- # don't record the DLL\r
- if Attr == "DLL":\r
- DllPathList.add(Value)\r
- continue\r
-\r
- if Tool not in RetVal:\r
- RetVal[Tool] = OrderedDict()\r
- RetVal[Tool][Attr] = Value\r
-\r
- ToolsDef = ''\r
- if GlobalData.gOptions.SilentMode and "MAKE" in RetVal:\r
- if "FLAGS" not in RetVal["MAKE"]:\r
- RetVal["MAKE"]["FLAGS"] = ""\r
- RetVal["MAKE"]["FLAGS"] += " -s"\r
- MakeFlags = ''\r
- for Tool in RetVal:\r
- for Attr in RetVal[Tool]:\r
- Value = RetVal[Tool][Attr]\r
- if Tool in self._BuildOptionWithToolDef(RetVal) and Attr in self._BuildOptionWithToolDef(RetVal)[Tool]:\r
- # check if override is indicated\r
- if self._BuildOptionWithToolDef(RetVal)[Tool][Attr].startswith('='):\r
- Value = self._BuildOptionWithToolDef(RetVal)[Tool][Attr][1:]\r
- else:\r
- if Attr != 'PATH':\r
- Value += " " + self._BuildOptionWithToolDef(RetVal)[Tool][Attr]\r
- else:\r
- Value = self._BuildOptionWithToolDef(RetVal)[Tool][Attr]\r
-\r
- if Attr == "PATH":\r
- # Don't put MAKE definition in the file\r
- if Tool != "MAKE":\r
- ToolsDef += "%s = %s\n" % (Tool, Value)\r
- elif Attr != "DLL":\r
- # Don't put MAKE definition in the file\r
- if Tool == "MAKE":\r
- if Attr == "FLAGS":\r
- MakeFlags = Value\r
- else:\r
- ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)\r
- ToolsDef += "\n"\r
- tool_def_file = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)\r
- SaveFileOnChange(tool_def_file, ToolsDef, False)\r
- for DllPath in DllPathList:\r
- os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]\r
- os.environ["MAKE_FLAGS"] = MakeFlags\r
-\r
- return RetVal\r
-\r
- ## Return the paths of tools\r
- @cached_property\r
- def ToolDefinitionFile(self):\r
- tool_def_file = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)\r
- if not os.path.exists(tool_def_file):\r
- self.ToolDefinition\r
- return tool_def_file\r
-\r
- ## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'.\r
- @cached_property\r
- def ToolChainFamily(self):\r
- ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase\r
- if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \\r
- or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \\r
- or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]:\r
- EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \\r
- % self.ToolChain)\r
- RetVal = TAB_COMPILER_MSFT\r
- else:\r
- RetVal = ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]\r
- return RetVal\r
-\r
- @cached_property\r
- def BuildRuleFamily(self):\r
- ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase\r
- if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \\r
- or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \\r
- or not ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]:\r
- EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \\r
- % self.ToolChain)\r
- return TAB_COMPILER_MSFT\r
-\r
- return ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]\r
-\r
- ## Return the build options specific for all modules in this platform\r
- @cached_property\r
- def BuildOption(self):\r
- return self._ExpandBuildOption(self.Platform.BuildOptions)\r
-\r
- def _BuildOptionWithToolDef(self, ToolDef):\r
- return self._ExpandBuildOption(self.Platform.BuildOptions, ToolDef=ToolDef)\r
-\r
- ## Return the build options specific for EDK modules in this platform\r
- @cached_property\r
- def EdkBuildOption(self):\r
- return self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)\r
-\r
- ## Return the build options specific for EDKII modules in this platform\r
- @cached_property\r
- def EdkIIBuildOption(self):\r
- return self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)\r
-\r
- ## Summarize the packages used by modules in this platform\r
- @cached_property\r
- def PackageList(self):\r
- RetVal = set()\r
- for La in self.LibraryAutoGenList:\r
- RetVal.update(La.DependentPackageList)\r
- for Ma in self.ModuleAutoGenList:\r
- RetVal.update(Ma.DependentPackageList)\r
- #Collect package set information from INF of FDF\r
- for ModuleFile in self._AsBuildModuleList:\r
- if ModuleFile in self.Platform.Modules:\r
- continue\r
- ModuleData = self.BuildDatabase[ModuleFile, self.Arch, self.BuildTarget, self.ToolChain]\r
- RetVal.update(ModuleData.Packages)\r
- return list(RetVal)\r
-\r
- @cached_property\r
- def NonDynamicPcdDict(self):\r
- return {(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):Pcd for Pcd in self.NonDynamicPcdList}\r
-\r
- ## Get list of non-dynamic PCDs\r
- @property\r
- def NonDynamicPcdList(self):\r
- if not self._NonDynamicPcdList:\r
- self.CollectPlatformDynamicPcds()\r
- return self._NonDynamicPcdList\r
-\r
- ## Get list of dynamic PCDs\r
- @property\r
- def DynamicPcdList(self):\r
- if not self._DynamicPcdList:\r
- self.CollectPlatformDynamicPcds()\r
- return self._DynamicPcdList\r
-\r
- ## Generate Token Number for all PCD\r
- @cached_property\r
- def PcdTokenNumber(self):\r
- RetVal = OrderedDict()\r
- TokenNumber = 1\r
- #\r
- # Make the Dynamic and DynamicEx PCD use within different TokenNumber area.\r
- # Such as:\r
- #\r
- # Dynamic PCD:\r
- # TokenNumber 0 ~ 10\r
- # DynamicEx PCD:\r
- # TokeNumber 11 ~ 20\r
- #\r
- for Pcd in self.DynamicPcdList:\r
- if Pcd.Phase == "PEI" and Pcd.Type in PCD_DYNAMIC_TYPE_SET:\r
- EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
- RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
- TokenNumber += 1\r
-\r
- for Pcd in self.DynamicPcdList:\r
- if Pcd.Phase == "PEI" and Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:\r
- EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
- RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
- TokenNumber += 1\r
-\r
- for Pcd in self.DynamicPcdList:\r
- if Pcd.Phase == "DXE" and Pcd.Type in PCD_DYNAMIC_TYPE_SET:\r
- EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
- RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
- TokenNumber += 1\r
-\r
- for Pcd in self.DynamicPcdList:\r
- if Pcd.Phase == "DXE" and Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:\r
- EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
- RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
- TokenNumber += 1\r
-\r
- for Pcd in self.NonDynamicPcdList:\r
- RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
- TokenNumber += 1\r
- return RetVal\r
-\r
- @cached_property\r
- def _MaList(self):\r
- for ModuleFile in self.Platform.Modules:\r
- Ma = ModuleAutoGen(\r
- self.Workspace,\r
- ModuleFile,\r
- self.BuildTarget,\r
- self.ToolChain,\r
- self.Arch,\r
- self.MetaFile\r
- )\r
- self.Platform.Modules[ModuleFile].M = Ma\r
- return [x.M for x in self.Platform.Modules.values()]\r
-\r
- ## Summarize ModuleAutoGen objects of all modules to be built for this platform\r
- @cached_property\r
- def ModuleAutoGenList(self):\r
- RetVal = []\r
- for Ma in self._MaList:\r
- if Ma not in RetVal:\r
- RetVal.append(Ma)\r
- return RetVal\r
-\r
- ## Summarize ModuleAutoGen objects of all libraries to be built for this platform\r
- @cached_property\r
- def LibraryAutoGenList(self):\r
- RetVal = []\r
- for Ma in self._MaList:\r
- for La in Ma.LibraryAutoGenList:\r
- if La not in RetVal:\r
- RetVal.append(La)\r
- if Ma not in La.ReferenceModules:\r
- La.ReferenceModules.append(Ma)\r
- return RetVal\r
-\r
- ## Test if a module is supported by the platform\r
- #\r
- # An error will be raised directly if the module or its arch is not supported\r
- # by the platform or current configuration\r
- #\r
- def ValidModule(self, Module):\r
- return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances \\r
- or Module in self._AsBuildModuleList\r
-\r
- ## Resolve the library classes in a module to library instances\r
- #\r
- # This method will not only resolve library classes but also sort the library\r
- # instances according to the dependency-ship.\r
- #\r
- # @param Module The module from which the library classes will be resolved\r
- #\r
- # @retval library_list List of library instances sorted\r
- #\r
- def ApplyLibraryInstance(self, Module):\r
- # Cover the case that the binary INF file is list in the FDF file but not DSC file, return empty list directly\r
- if str(Module) not in self.Platform.Modules:\r
- return []\r
-\r
- return GetModuleLibInstances(Module,\r
- self.Platform,\r
- self.BuildDatabase,\r
- self.Arch,\r
- self.BuildTarget,\r
- self.ToolChain,\r
- self.MetaFile,\r
- EdkLogger)\r
-\r
- ## Override PCD setting (type, value, ...)\r
- #\r
- # @param ToPcd The PCD to be overridden\r
- # @param FromPcd The PCD overriding from\r
- #\r
    def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):
        """Override ToPcd's settings (type, value, size, SKUs) with FromPcd's.

        Mutates ToPcd in place.  FromPcd may be None, in which case only the
        trailing MaxDatumSize / default-SKU fix-ups are skipped or applied.

        @param ToPcd    PCD object being overridden (mutated)
        @param FromPcd  PCD object supplying the overriding settings, or None
        @param Module   Module the PCD belongs to (used in error messages)
        @param Msg      Description of where FromPcd came from, for errors
        @param Library  Library file involved, if any (used in error messages)
        """
        #
        # in case there's PCDs coming from FDF file, which have no type given.
        # at this point, ToPcd.Type has the type found from dependent
        # package
        #
        # Mixed PCDs are reported under their original (unmangled) name.
        TokenCName = ToPcd.TokenCName
        for PcdItem in GlobalData.MixedPcd:
            if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                TokenCName = PcdItem[0]
                break
        if FromPcd is not None:
            # Type resolution, in priority order:
            #   1. ToPcd still pending -> take FromPcd's type outright.
            #   2. Types differ but ToPcd's is a substring of FromPcd's
            #      (e.g. DynamicEx vs DynamicExDefault) -> widen to FromPcd's.
            #   3. Types genuinely conflict -> fatal build error.
            if ToPcd.Pending and FromPcd.Type:
                ToPcd.Type = FromPcd.Type
            elif ToPcd.Type and FromPcd.Type\
                and ToPcd.Type != FromPcd.Type and ToPcd.Type in FromPcd.Type:
                if ToPcd.Type.strip() == TAB_PCDS_DYNAMIC_EX:
                    ToPcd.Type = FromPcd.Type
            elif ToPcd.Type and FromPcd.Type \
                and ToPcd.Type != FromPcd.Type:
                if Library:
                    Module = str(Module) + " 's library file (" + str(Library) + ")"
                EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
                                ExtraData="%s.%s is used as [%s] in module %s, but as [%s] in %s."\
                                          % (ToPcd.TokenSpaceGuidCName, TokenCName,
                                             ToPcd.Type, Module, FromPcd.Type, Msg),
                                File=self.MetaFile)

            # Copy over every field FromPcd explicitly provides; empty/falsy
            # fields in FromPcd leave ToPcd's current value untouched.
            if FromPcd.MaxDatumSize:
                ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
                ToPcd.MaxSizeUserSet = FromPcd.MaxDatumSize
            if FromPcd.DefaultValue:
                ToPcd.DefaultValue = FromPcd.DefaultValue
            if FromPcd.TokenValue:
                ToPcd.TokenValue = FromPcd.TokenValue
            if FromPcd.DatumType:
                ToPcd.DatumType = FromPcd.DatumType
            if FromPcd.SkuInfoList:
                ToPcd.SkuInfoList = FromPcd.SkuInfoList
            if FromPcd.UserDefinedDefaultStoresFlag:
                ToPcd.UserDefinedDefaultStoresFlag = FromPcd.UserDefinedDefaultStoresFlag
            # Add Flexible PCD format parse
            if ToPcd.DefaultValue:
                try:
                    ToPcd.DefaultValue = ValueExpressionEx(ToPcd.DefaultValue, ToPcd.DatumType, self.Workspace._GuidDict)(True)
                except BadExpression as Value:
                    EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, ToPcd.DefaultValue, Value),
                                    File=self.MetaFile)

            # check the validation of datum
            IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
            if not IsValid:
                EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
                                ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, TokenCName))
            ToPcd.validateranges = FromPcd.validateranges
            ToPcd.validlists = FromPcd.validlists
            ToPcd.expressions = FromPcd.expressions
            ToPcd.CustomAttribute = FromPcd.CustomAttribute

        # Derive MaxDatumSize for VOID* PCDs that still lack one, from the
        # default value's form: L"..." (unicode), {...} (byte array), "..."
        if FromPcd is not None and ToPcd.DatumType == TAB_VOID and not ToPcd.MaxDatumSize:
            EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
                            % (ToPcd.TokenSpaceGuidCName, TokenCName))
            Value = ToPcd.DefaultValue
            if not Value:
                ToPcd.MaxDatumSize = '1'
            elif Value[0] == 'L':
                ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
            elif Value[0] == '{':
                ToPcd.MaxDatumSize = str(len(Value.split(',')))
            else:
                ToPcd.MaxDatumSize = str(len(Value) - 1)

        # apply default SKU for dynamic PCDS if specified one is not available
        if (ToPcd.Type in PCD_DYNAMIC_TYPE_SET or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_SET) \
            and not ToPcd.SkuInfoList:
            if self.Platform.SkuName in self.Platform.SkuIds:
                SkuName = self.Platform.SkuName
            else:
                SkuName = TAB_DEFAULT
            ToPcd.SkuInfoList = {
                SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue)
            }
-\r
- ## Apply PCD setting defined platform to a module\r
- #\r
- # @param Module The module from which the PCD setting will be overridden\r
- #\r
- # @retval PCD_list The list PCDs with settings from platform\r
- #\r
    def ApplyPcdSetting(self, Module, Pcds, Library=""):
        """Apply platform DSC PCD settings on top of a module's own PCDs.

        Mutates the PCD objects in Pcds in place: platform-level settings
        first, then module-scoped [Components] overrides, then command-line
        --pcd overrides, and finally MaxDatumSize derivation for VOID* PCDs.

        @param Module   Module whose PCDs are being resolved
        @param Pcds     {(TokenCName, TokenSpaceGuidCName): PCD} for the module
        @param Library  Library file involved, if any (for error messages)
        @retval list    The (mutated) PCD objects of Pcds
        """
        # for each PCD in module
        for Name, Guid in Pcds:
            PcdInModule = Pcds[Name, Guid]
            # find out the PCD setting in platform
            if (Name, Guid) in self.Platform.Pcds:
                PcdInPlatform = self.Platform.Pcds[Name, Guid]
            else:
                PcdInPlatform = None
            # then override the settings if any
            self._OverridePcd(PcdInModule, PcdInPlatform, Module, Msg="DSC PCD sections", Library=Library)
            # resolve the VariableGuid value
            for SkuId in PcdInModule.SkuInfoList:
                Sku = PcdInModule.SkuInfoList[SkuId]
                if Sku.VariableGuid == '': continue
                Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
                if Sku.VariableGuidValue is None:
                    PackageList = "\n\t".join(str(P) for P in self.PackageList)
                    EdkLogger.error(
                                'build',
                                RESOURCE_NOT_AVAILABLE,
                                "Value of GUID [%s] is not found in" % Sku.VariableGuid,
                                ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
                                                        % (Guid, Name, str(Module)),
                                File=self.MetaFile
                                )

        # override PCD settings with module specific setting
        if Module in self.Platform.Modules:
            PlatformModule = self.Platform.Modules[str(Module)]
            for Key in PlatformModule.Pcds:
                # Command-line --pcd values (no field name) beat the DSC
                # [Components] value for this key.
                if GlobalData.BuildOptionPcd:
                    for pcd in GlobalData.BuildOptionPcd:
                        (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, _) = pcd
                        if (TokenCName, TokenSpaceGuidCName) == Key and FieldName =="":
                            PlatformModule.Pcds[Key].DefaultValue = pcdvalue
                            PlatformModule.Pcds[Key].PcdValueFromComm = pcdvalue
                            break
                # Locate the module PCD this override targets, accounting
                # for mixed (renamed) PCDs.
                Flag = False
                if Key in Pcds:
                    ToPcd = Pcds[Key]
                    Flag = True
                elif Key in GlobalData.MixedPcd:
                    for PcdItem in GlobalData.MixedPcd[Key]:
                        if PcdItem in Pcds:
                            ToPcd = Pcds[PcdItem]
                            Flag = True
                            break
                if Flag:
                    self._OverridePcd(ToPcd, PlatformModule.Pcds[Key], Module, Msg="DSC Components Module scoped PCD section", Library=Library)
        # use PCD value to calculate the MaxDatumSize when it is not specified
        for Name, Guid in Pcds:
            Pcd = Pcds[Name, Guid]
            if Pcd.DatumType == TAB_VOID and not Pcd.MaxDatumSize:
                Pcd.MaxSizeUserSet = None
                Value = Pcd.DefaultValue
                if not Value:
                    Pcd.MaxDatumSize = '1'
                elif Value[0] == 'L':
                    # L"..." unicode string: two bytes per character, quotes excluded
                    Pcd.MaxDatumSize = str((len(Value) - 2) * 2)
                elif Value[0] == '{':
                    # {..,..} byte array: one byte per comma-separated item
                    Pcd.MaxDatumSize = str(len(Value.split(',')))
                else:
                    Pcd.MaxDatumSize = str(len(Value) - 1)
        return list(Pcds.values())
-\r
-\r
-\r
- ## Calculate the priority value of the build option\r
- #\r
- # @param Key Build option definition contain: TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE\r
- #\r
- # @retval Value Priority value based on the priority list.\r
- #\r
- def CalculatePriorityValue(self, Key):\r
- Target, ToolChain, Arch, CommandType, Attr = Key.split('_')\r
- PriorityValue = 0x11111\r
- if Target == TAB_STAR:\r
- PriorityValue &= 0x01111\r
- if ToolChain == TAB_STAR:\r
- PriorityValue &= 0x10111\r
- if Arch == TAB_STAR:\r
- PriorityValue &= 0x11011\r
- if CommandType == TAB_STAR:\r
- PriorityValue &= 0x11101\r
- if Attr == TAB_STAR:\r
- PriorityValue &= 0x11110\r
-\r
- return self.PrioList["0x%0.5x" % PriorityValue]\r
-\r
-\r
- ## Expand * in build option key\r
- #\r
- # @param Options Options to be expanded\r
- # @param ToolDef Use specified ToolDef instead of full version.\r
- # This is needed during initialization to prevent\r
- # infinite recursion betweeh BuildOptions,\r
- # ToolDefinition, and this function.\r
- #\r
- # @retval options Options expanded\r
- #\r
    def _ExpandBuildOption(self, Options, ModuleStyle=None, ToolDef=None):
        """Expand wildcard (*) build-option keys into the concrete options
        for this platform's target/toolchain/arch.

        @param Options      Raw build options keyed by
                            (Family, 'TARGET_TOOLCHAIN_ARCH_TOOL_ATTR'[, ModuleStyle])
        @param ModuleStyle  EDK_NAME or EDKII_NAME to filter style-specific
                            keys; None accepts all
        @param ToolDef      Tool definition table used for family matching;
                            defaults to self.ToolDefinition (passing it in
                            explicitly prevents infinite recursion between
                            BuildOptions and ToolDefinition)
        @retval dict        {Tool: {Attr: value}} with wildcards resolved

        NOTE(review): when two '=' override keys shadow each other, the
        lower-priority entry is POPPED from the caller's Options dict.
        """
        if not ToolDef:
            ToolDef = self.ToolDefinition
        BuildOptions = {}
        FamilyMatch = False
        FamilyIsNull = True

        OverrideList = {}
        #
        # Construct a list contain the build options which need override.
        #
        for Key in Options:
            #
            # Key[0] -- tool family
            # Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
            #
            if (Key[0] == self.BuildRuleFamily and
                (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
                Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
                # Only '='-style (replace) options participate in override
                # priority resolution below.
                if (Target == self.BuildTarget or Target == TAB_STAR) and\
                    (ToolChain == self.ToolChain or ToolChain == TAB_STAR) and\
                    (Arch == self.Arch or Arch == TAB_STAR) and\
                    Options[Key].startswith("="):

                    if OverrideList.get(Key[1]) is not None:
                        OverrideList.pop(Key[1])
                    OverrideList[Key[1]] = Options[Key]

        #
        # Use the highest priority value.
        #
        if (len(OverrideList) >= 2):
            KeyList = list(OverrideList.keys())
            for Index in range(len(KeyList)):
                NowKey = KeyList[Index]
                Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
                for Index1 in range(len(KeyList) - Index - 1):
                    NextKey = KeyList[Index1 + Index + 1]
                    #
                    # Compare two Key, if one is included by another, choose the higher priority one
                    #
                    Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
                    if (Target1 == Target2 or Target1 == TAB_STAR or Target2 == TAB_STAR) and\
                        (ToolChain1 == ToolChain2 or ToolChain1 == TAB_STAR or ToolChain2 == TAB_STAR) and\
                        (Arch1 == Arch2 or Arch1 == TAB_STAR or Arch2 == TAB_STAR) and\
                        (CommandType1 == CommandType2 or CommandType1 == TAB_STAR or CommandType2 == TAB_STAR) and\
                        (Attr1 == Attr2 or Attr1 == TAB_STAR or Attr2 == TAB_STAR):

                        # The losing (lower-priority) key is removed from the
                        # caller's Options so it cannot match in the loops below.
                        if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):
                            if Options.get((self.BuildRuleFamily, NextKey)) is not None:
                                Options.pop((self.BuildRuleFamily, NextKey))
                        else:
                            if Options.get((self.BuildRuleFamily, NowKey)) is not None:
                                Options.pop((self.BuildRuleFamily, NowKey))

        # First pass: match options against BUILDRULEFAMILY (falling back to
        # FAMILY) as defined in ToolDef.
        for Key in Options:
            if ModuleStyle is not None and len (Key) > 2:
                # Check Module style is EDK or EDKII.
                # Only append build option for the matched style module.
                if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
                    continue
                elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
                    continue
            Family = Key[0]
            Target, Tag, Arch, Tool, Attr = Key[1].split("_")
            # if tool chain family doesn't match, skip it
            if Tool in ToolDef and Family != "":
                FamilyIsNull = False
                if ToolDef[Tool].get(TAB_TOD_DEFINES_BUILDRULEFAMILY, "") != "":
                    if Family != ToolDef[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
                        continue
                elif Family != ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:
                    continue
                FamilyMatch = True
            # expand any wildcard
            if Target == TAB_STAR or Target == self.BuildTarget:
                if Tag == TAB_STAR or Tag == self.ToolChain:
                    if Arch == TAB_STAR or Arch == self.Arch:
                        if Tool not in BuildOptions:
                            BuildOptions[Tool] = {}
                        if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
                            BuildOptions[Tool][Attr] = Options[Key]
                        else:
                            # append options for the same tool except PATH
                            if Attr != 'PATH':
                                BuildOptions[Tool][Attr] += " " + Options[Key]
                            else:
                                BuildOptions[Tool][Attr] = Options[Key]
        # Build Option Family has been checked, which need't to be checked again for family.
        if FamilyMatch or FamilyIsNull:
            return BuildOptions

        # Second pass (only when no family matched above): match strictly
        # against the plain FAMILY attribute.
        for Key in Options:
            if ModuleStyle is not None and len (Key) > 2:
                # Check Module style is EDK or EDKII.
                # Only append build option for the matched style module.
                if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
                    continue
                elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
                    continue
            Family = Key[0]
            Target, Tag, Arch, Tool, Attr = Key[1].split("_")
            # if tool chain family doesn't match, skip it
            if Tool not in ToolDef or Family == "":
                continue
            # option has been added before
            if Family != ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:
                continue

            # expand any wildcard
            if Target == TAB_STAR or Target == self.BuildTarget:
                if Tag == TAB_STAR or Tag == self.ToolChain:
                    if Arch == TAB_STAR or Arch == self.Arch:
                        if Tool not in BuildOptions:
                            BuildOptions[Tool] = {}
                        if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
                            BuildOptions[Tool][Attr] = Options[Key]
                        else:
                            # append options for the same tool except PATH
                            if Attr != 'PATH':
                                BuildOptions[Tool][Attr] += " " + Options[Key]
                            else:
                                BuildOptions[Tool][Attr] = Options[Key]
        return BuildOptions
def GetGlobalBuildOptions(self, Module):
    """Collect the expanded build options that apply to *Module* from global scopes.

    Returns a pair (ModuleTypeOptions, PlatformModuleOptions):
    options declared per-package for the module's type, and options
    declared for this specific module in the platform DSC (empty dict
    when the module has no DSC override entry).
    """
    RawPkgOptions = self.Platform.GetBuildOptionsByPkg(Module, Module.ModuleType)
    PkgScopeOptions = self._ExpandBuildOption(RawPkgOptions)
    PlatformModuleOptions = {}
    if Module in self.Platform.Modules:
        DscOverride = self.Platform.Modules[str(Module)]
        PlatformModuleOptions = self._ExpandBuildOption(DscOverride.BuildOptions)
    return PkgScopeOptions, PlatformModuleOptions
    ## Append build options in platform to a module
    #
    # Merges tool-definition, module INF, platform, module-type and DSC
    # per-module option scopes (in that fixed precedence order) into one
    # tool/attribute table, and extracts the BUILDRULEORDER setting.
    #
    # @param Module The module to which the build options will be appended
    #
    # @retval options The options appended with build options in platform,
    #                 plus the BuildRuleOrder string (or None)
    #
    def ApplyBuildOption(self, Module):
        # Get the different options for the different style module
        PlatformOptions = self.EdkIIBuildOption
        ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)
        ModuleTypeOptions = self._ExpandBuildOption(ModuleTypeOptions)
        ModuleOptions = self._ExpandBuildOption(Module.BuildOptions)
        if Module in self.Platform.Modules:
            PlatformModule = self.Platform.Modules[str(Module)]
            PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions)
        else:
            PlatformModuleOptions = {}

        # BUILDRULEORDER may appear in any scope; the last scope scanned wins
        # (PlatformModuleOptions has the highest priority).
        BuildRuleOrder = None
        for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
            for Tool in Options:
                for Attr in Options[Tool]:
                    if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
                        BuildRuleOrder = Options[Tool][Attr]

        AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
                       list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
                       list(self.ToolDefinition.keys()))
        # defaultdict of str so that '+=' below works on first touch of an attribute
        BuildOptions = defaultdict(lambda: defaultdict(str))
        for Tool in AllTools:
            for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
                if Tool not in Options:
                    continue
                for Attr in Options[Tool]:
                    #
                    # Do not generate it in Makefile
                    #
                    if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
                        continue
                    Value = Options[Tool][Attr]
                    # check if override is indicated: a leading '=' replaces
                    # whatever lower-priority scopes contributed
                    if Value.startswith('='):
                        BuildOptions[Tool][Attr] = mws.handleWsMacro(Value[1:])
                    else:
                        # PATH is a single value and is always replaced, never appended
                        if Attr != 'PATH':
                            BuildOptions[Tool][Attr] += " " + mws.handleWsMacro(Value)
                        else:
                            BuildOptions[Tool][Attr] = mws.handleWsMacro(Value)

        return BuildOptions, BuildRuleOrder
-\r
-#\r
-# extend lists contained in a dictionary with lists stored in another dictionary\r
-# if CopyToDict is not derived from DefaultDict(list) then this may raise exception\r
-#\r
-def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):\r
- for Key in CopyFromDict:\r
- CopyToDict[Key].extend(CopyFromDict[Key])\r
-\r
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    # Join the individual path components, make sure the directory exists on
    # disk (CreateDirectory is a no-op if it already does), and hand the
    # joined path back to the caller.
    RetVal = path.join(*PathList)
    CreateDirectory(RetVal)
    return RetVal
-\r
## ModuleAutoGen class
#
# This class encapsules the AutoGen behaviors for the build tools. In addition to
# the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
# to the [depex] section in module's inf file.
#
class ModuleAutoGen(AutoGen):
    # call super().__init__ then call the worker function with different parameter count
    def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # _Init guards against double initialization: __new__ may hand back an
        # already-initialized object (NOTE(review): presumably the AutoGen base
        # caches instances per (MetaFile, Arch, ...) — confirm in the base class).
        if not hasattr(self, "_Init"):
            self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
            self._Init = True
-\r
    ## Cache the timestamps of metafiles of every module in a class attribute
    #
    TimeDict = {}

    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # check if this module is employed by active platform;
        # args[0] is the platform meta-file (see _InitWorker's signature)
        if not PlatformAutoGen(Workspace, args[0], Target, Toolchain, Arch).ValidModule(MetaFile):
            EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
                              % (MetaFile, Arch))
            # Returning None aborts construction; callers must check the result.
            return None
        return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
-\r
    ## Initialize ModuleAutoGen
    #
    # @param Workspace EdkIIWorkspaceBuild object
    # @param ModuleFile The path of module file
    # @param Target Build target (DEBUG, RELEASE)
    # @param Toolchain Name of tool chain
    # @param Arch The arch the module supports
    # @param PlatformFile Platform meta-file
    #
    def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)

        self.Workspace = Workspace
        self.WorkspaceDir = Workspace.WorkspaceDir
        self.MetaFile = ModuleFile
        self.PlatformInfo = PlatformAutoGen(Workspace, PlatformFile, Target, Toolchain, Arch)

        # Module source directory, made relative to the (multiple) workspace root
        self.SourceDir = self.MetaFile.SubDir
        self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)

        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily

        # Flags recording which generated artifacts have been produced so far
        self.IsCodeFileCreated = False
        self.IsAsBuiltInfCreated = False
        self.DepexGenerated = False

        self.BuildDatabase = self.Workspace.BuildDatabase
        self.BuildRuleOrder = None
        self.BuildTime = 0

        # Usage comments harvested from INF sections, keyed by C name
        self._PcdComments = OrderedListDict()
        self._GuidComments = OrderedListDict()
        self._ProtocolComments = OrderedListDict()
        self._PpiComments = OrderedListDict()
        # Lazily populated by _ApplyBuildRule() via the Targets property
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None

        self.AutoGenDepSet = set()
        self.ReferenceModules = []
        self.ConstPcd = {}
-\r
    ## hash() operator of ModuleAutoGen
    #
    # The module file path and arch string will be used to represent
    # hash value of this object
    #
    # @retval int Hash value of the module file path and arch
    #
    @cached_class_function
    def __hash__(self):
        # (MetaFile, Arch) uniquely identifies a ModuleAutoGen; cache the hash
        # since it is requested repeatedly for dict/set membership.
        return hash((self.MetaFile, self.Arch))
-\r
- def __repr__(self):\r
- return "%s [%s]" % (self.MetaFile, self.Arch)\r
-\r
- # Get FixedAtBuild Pcds of this Module\r
- @cached_property\r
- def FixedAtBuildPcds(self):\r
- RetVal = []\r
- for Pcd in self.ModulePcdList:\r
- if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:\r
- continue\r
- if Pcd not in RetVal:\r
- RetVal.append(Pcd)\r
- return RetVal\r
-\r
- @cached_property\r
- def FixedVoidTypePcds(self):\r
- RetVal = {}\r
- for Pcd in self.FixedAtBuildPcds:\r
- if Pcd.DatumType == TAB_VOID:\r
- if '{}.{}'.format(Pcd.TokenSpaceGuidCName, Pcd.TokenCName) not in RetVal:\r
- RetVal['{}.{}'.format(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)] = Pcd.DefaultValue\r
- return RetVal\r
-\r
    ## Return a base name that is unique across the platform's modules.
    #
    # If another module shares this module's BaseName but has a different
    # FILE_GUID, the GUID is appended to disambiguate; two modules with the
    # same BaseName AND the same GUID are a fatal build error.
    @property
    def UniqueBaseName(self):
        BaseName = self.Name
        for Module in self.PlatformInfo.ModuleAutoGenList:
            if Module.MetaFile == self.MetaFile:
                continue
            if Module.Name == self.Name:
                # uuid.UUID normalizes case/format before comparing GUID strings
                if uuid.UUID(Module.Guid) == uuid.UUID(self.Guid):
                    EdkLogger.error("build", FILE_DUPLICATED, 'Modules have same BaseName and FILE_GUID:\n'
                                    '  %s\n  %s' % (Module.MetaFile, self.MetaFile))
                BaseName = '%s_%s' % (self.Name, self.Guid)
        return BaseName
-\r
    # Macros could be used in build_rule.txt (also Makefile)
    # The mapping is ordered so generated files list macros deterministically.
    @cached_property
    def Macros(self):
        return OrderedDict((
            ("WORKSPACE", self.WorkspaceDir),
            ("MODULE_NAME", self.Name),
            ("MODULE_NAME_GUID", self.UniqueBaseName),
            ("MODULE_GUID", self.Guid),
            ("MODULE_VERSION", self.Version),
            ("MODULE_TYPE", self.ModuleType),
            ("MODULE_FILE", str(self.MetaFile)),
            ("MODULE_FILE_BASE_NAME", self.MetaFile.BaseName),
            ("MODULE_RELATIVE_DIR", self.SourceDir),
            ("MODULE_DIR", self.SourceDir),
            ("BASE_NAME", self.Name),
            ("ARCH", self.Arch),
            # TOOLCHAIN / TOOLCHAIN_TAG / TOOL_CHAIN_TAG are aliases of the same value
            ("TOOLCHAIN", self.ToolChain),
            ("TOOLCHAIN_TAG", self.ToolChain),
            ("TOOL_CHAIN_TAG", self.ToolChain),
            ("TARGET", self.BuildTarget),
            ("BUILD_DIR", self.PlatformInfo.BuildDir),
            ("BIN_DIR", os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("LIB_DIR", os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("MODULE_BUILD_DIR", self.BuildDir),
            ("OUTPUT_DIR", self.OutputDir),
            ("DEBUG_DIR", self.DebugDir),
            ("DEST_DIR_OUTPUT", self.OutputDir),
            ("DEST_DIR_DEBUG", self.DebugDir),
            ("PLATFORM_NAME", self.PlatformInfo.Name),
            ("PLATFORM_GUID", self.PlatformInfo.Guid),
            ("PLATFORM_VERSION", self.PlatformInfo.Version),
            ("PLATFORM_RELATIVE_DIR", self.PlatformInfo.SourceDir),
            ("PLATFORM_DIR", mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
            ("PLATFORM_OUTPUT_DIR", self.PlatformInfo.OutputDir),
            ("FFS_OUTPUT_DIR", self.FfsOutputDir)
        ))
-\r
    ## Return the module build data object from the workspace build database
    @cached_property
    def Module(self):
        return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]

    ## Return the module name (INF BASE_NAME)
    @cached_property
    def Name(self):
        return self.Module.BaseName

    ## Return the module DxsFile if exist
    @cached_property
    def DxsFile(self):
        return self.Module.DxsFile

    ## Return the module meta-file GUID
    @cached_property
    def Guid(self):
        #
        # To build same module more than once, the module path with FILE_GUID overridden has
        # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
        # in DSC. The overridden GUID can be retrieved from file name
        #
        if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
            #
            # Length of GUID is 36
            #
            return os.path.basename(self.MetaFile.Path)[:36]
        return self.Module.Guid

    ## Return the module version
    @cached_property
    def Version(self):
        return self.Module.Version

    ## Return the module type
    @cached_property
    def ModuleType(self):
        return self.Module.ModuleType

    ## Return the component type (for Edk.x style of module)
    @cached_property
    def ComponentType(self):
        return self.Module.ComponentType

    ## Return the build type
    @cached_property
    def BuildType(self):
        return self.Module.BuildType

    ## Return the PCD_IS_DRIVER setting
    @cached_property
    def PcdIsDriver(self):
        return self.Module.PcdIsDriver

    ## Return the autogen version, i.e. module meta-file version
    @cached_property
    def AutoGenVersion(self):
        return self.Module.AutoGenVersion

    ## Check if the module is library or not (non-empty LibraryClass)
    @cached_property
    def IsLibrary(self):
        return bool(self.Module.LibraryClass)

    ## Check if the module is binary module or not
    @cached_property
    def IsBinaryModule(self):
        return self.Module.IsBinaryModule
-\r
    ## Return the directory to store intermediate files of the module
    #  (created on first access)
    @cached_property
    def BuildDir(self):
        return _MakeDir((
            self.PlatformInfo.BuildDir,
            self.Arch,
            self.SourceDir,
            self.MetaFile.BaseName
        ))

    ## Return the directory to store the intermediate object files of the module
    @cached_property
    def OutputDir(self):
        return _MakeDir((self.BuildDir, "OUTPUT"))

    ## Return the directory path to store ffs file
    #  (empty string when no FDF file is being processed)
    @cached_property
    def FfsOutputDir(self):
        if GlobalData.gFdfParser:
            return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
        return ''

    ## Return the directory to store auto-gened source files of the module
    @cached_property
    def DebugDir(self):
        return _MakeDir((self.BuildDir, "DEBUG"))
-\r
- ## Return the path of custom file\r
- @cached_property\r
- def CustomMakefile(self):\r
- RetVal = {}\r
- for Type in self.Module.CustomMakefile:\r
- MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'\r
- File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r
- RetVal[MakeType] = File\r
- return RetVal\r
-\r
    ## Return the directory of the makefile
    #
    # @retval string The directory string of module's makefile
    #
    @cached_property
    def MakeFileDir(self):
        # The makefile lives at the top of the module's build directory.
        return self.BuildDir

    ## Return build command string
    #
    # @retval string Build command string (inherited from the platform)
    #
    @cached_property
    def BuildCommand(self):
        return self.PlatformInfo.BuildCommand
-\r
- ## Get object list of all packages the module and its dependent libraries belong to\r
- #\r
- # @retval list The list of package object\r
- #\r
- @cached_property\r
- def DerivedPackageList(self):\r
- PackageList = []\r
- for M in [self.Module] + self.DependentLibraryList:\r
- for Package in M.Packages:\r
- if Package in PackageList:\r
- continue\r
- PackageList.append(Package)\r
- return PackageList\r
-\r
    ## Get the depex string
    #
    # Re-parses the [Depex] sections of this module and its dependent library
    # INFs and renders them as text for the As-Built INF.
    #
    # @return : a string contain all depex expression.
    def _GetDepexExpresionString(self):
        DepexStr = ''
        DepexList = []
        ## DPX_SOURCE IN Define section: the depex comes from a file, nothing to render.
        if self.Module.DxsFile:
            return DepexStr
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            DepexExpressionList = InfObj.GetDepexExpresionList()
            for DepexExpression in DepexExpressionList:
                for key in DepexExpression:
                    Arch, ModuleType = key
                    # drop comment lines from the raw section content
                    DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
                    # the type of build module is USER_DEFINED.
                    # All different DEPEX section tags would be copied into the As Built INF file
                    # and there would be separate DEPEX section tags
                    if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
                        if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
                    else:
                        if Arch.upper() == TAB_ARCH_COMMON or \
                           (Arch.upper() == self.Arch.upper() and \
                           ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
                            DepexList.append({(Arch, ModuleType): DepexExpr})

        #the type of build module is USER_DEFINED: emit each section verbatim,
        #commented out, under its own [Depex.<arch>.<type>] tag.
        if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
            for Depex in DepexList:
                for key in Depex:
                    DepexStr += '[Depex.%s.%s]\n' % key
                    DepexStr += '\n'.join('# '+ val for val in Depex[key])
                    DepexStr += '\n\n'
            if not DepexStr:
                return '[Depex.%s]\n' % self.Arch
            return DepexStr

        #the type of build module not is USER_DEFINED: AND all expressions together.
        Count = 0
        for Depex in DepexList:
            Count += 1
            if DepexStr != '':
                DepexStr += ' AND '
            DepexStr += '('
            for D in Depex.values():
                DepexStr += ' '.join(val for val in D)
            # strip a trailing 'END' token before closing the parenthesis
            Index = DepexStr.find('END')
            if Index > -1 and Index == len(DepexStr) - 3:
                DepexStr = DepexStr[:-3]
            DepexStr = DepexStr.strip()
            DepexStr += ')'
        if Count == 1:
            # a single expression needs no wrapping parentheses
            DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
        if not DepexStr:
            return '[Depex.%s]\n' % self.Arch
        return '[Depex.%s]\n# ' % self.Arch + DepexStr
-\r
    ## Merge dependency expression
    #
    # Builds the token list of the module's depex by AND-ing together the
    # depex of the module itself and of every dependent library, substituting
    # FixedAtBuild VOID* PCD values for dotted PCD names.
    #
    # @retval list The token list of the dependency expression after parsed
    #
    @cached_property
    def DepexList(self):
        # No merged depex for DXS-file modules, libraries, or modules that
        # carry a prebuilt .depex source file.
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexList = []
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.Depex[self.Arch, self.ModuleType]:
                if DepexList != []:
                    DepexList.append('AND')
                DepexList.append('(')
                #replace D with value if D is FixedAtBuild PCD
                NewList = []
                for item in D:
                    # tokens without '.' are opcodes/GUID C names, not PCD references
                    if '.' not in item:
                        NewList.append(item)
                    else:
                        # Look the PCD up in this module first, then in the
                        # library that contributed the expression.
                        FixedVoidTypePcds = {}
                        if item in self.FixedVoidTypePcds:
                            FixedVoidTypePcds = self.FixedVoidTypePcds
                        elif M in self.PlatformInfo.LibraryAutoGenList:
                            Index = self.PlatformInfo.LibraryAutoGenList.index(M)
                            FixedVoidTypePcds = self.PlatformInfo.LibraryAutoGenList[Index].FixedVoidTypePcds
                        if item not in FixedVoidTypePcds:
                            EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))
                        else:
                            # the value must be a 16-byte GUID byte array
                            Value = FixedVoidTypePcds[item]
                            if len(Value.split(',')) != 16:
                                EdkLogger.error("build", FORMAT_INVALID,
                                                "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
                            NewList.append(Value)
                DepexList.extend(NewList)
                if DepexList[-1] == 'END':  # no need of a END at this time
                    DepexList.pop()
                DepexList.append(')')
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexList))
            # BEFORE/AFTER ordering opcodes make further merging meaningless
            if 'BEFORE' in DepexList or 'AFTER' in DepexList:
                break
        if len(DepexList) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexList}
-\r
    ## Merge dependency expression
    #
    # Same merge as DepexList, but as a single human-readable string
    # (used for reporting) instead of a token list.
    #
    # @retval dict Maps module type to the merged expression string
    #
    @cached_property
    def DepexExpressionDict(self):
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexExpressionString = ''
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.DepexExpression[self.Arch, self.ModuleType]:
                if DepexExpressionString != '':
                    DepexExpressionString += ' AND '
                DepexExpressionString += '('
                DepexExpressionString += D
                # drop the trailing END token before closing the parenthesis
                DepexExpressionString = DepexExpressionString.rstrip('END').strip()
                DepexExpressionString += ')'
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
            if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
                break
        if len(DepexExpressionString) > 0:
            EdkLogger.verbose('')

        return {self.ModuleType:DepexExpressionString}
-\r
    # Get the tiano core user extension, it is contain dependent library.
    # @retval: a list contain tiano core userextension.
    #
    def _GetTianoCoreUserExtensionList(self):
        TianoCoreUserExtentionList = []
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
            for TianoCoreUserExtent in TianoCoreUserExtenList:
                for Section in TianoCoreUserExtent:
                    # Section name is dot-separated; the 4th element, when
                    # present, is the arch qualifier.
                    ItemList = Section.split(TAB_SPLIT)
                    Arch = self.Arch
                    if len(ItemList) == 4:
                        Arch = ItemList[3]
                    # keep only sections for common arch or this module's arch
                    if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
                        TianoCoreList = []
                        TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
                        TianoCoreList.extend(TianoCoreUserExtent[Section][:])
                        TianoCoreList.append('\n')
                        TianoCoreUserExtentionList.append(TianoCoreList)

        return TianoCoreUserExtentionList
-\r
    ## Return the list of specification version required for the module
    #
    # @retval list The list of specification defined in module file
    #
    @cached_property
    def Specification(self):
        return self.Module.Specification
-\r
    ## Tool option for the module build
    #
    # @param PlatformInfo The object of PlatformBuildInfo
    # @retval dict The dict containing valid options
    #
    @cached_property
    def BuildOption(self):
        # Side effect: also captures BuildRuleOrder on self, normalized into
        # a list of '.ext' strings used by _MatchBuildRuleOrder().
        RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
        if self.BuildRuleOrder:
            self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
        return RetVal
-\r
- ## Get include path list from tool option for the module build\r
- #\r
- # @retval list The include path list\r
- #\r
- @cached_property\r
- def BuildOptionIncPathList(self):\r
- #\r
- # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT\r
- # is the former use /I , the Latter used -I to specify include directories\r
- #\r
- if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):\r
- BuildOptIncludeRegEx = gBuildOptIncludePatternMsft\r
- elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):\r
- BuildOptIncludeRegEx = gBuildOptIncludePatternOther\r
- else:\r
- #\r
- # New ToolChainFamily, don't known whether there is option to specify include directories\r
- #\r
- return []\r
-\r
- RetVal = []\r
- for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):\r
- try:\r
- FlagOption = self.BuildOption[Tool]['FLAGS']\r
- except KeyError:\r
- FlagOption = ''\r
-\r
- if self.ToolChainFamily != 'RVCT':\r
- IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]\r
- else:\r
- #\r
- # RVCT may specify a list of directory separated by commas\r
- #\r
- IncPathList = []\r
- for Path in BuildOptIncludeRegEx.findall(FlagOption):\r
- PathList = GetSplitList(Path, TAB_COMMA_SPLIT)\r
- IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)\r
-\r
- #\r
- # EDK II modules must not reference header files outside of the packages they depend on or\r
- # within the module's directory tree. Report error if violation.\r
- #\r
- if GlobalData.gDisableIncludePathCheck == False:\r
- for Path in IncPathList:\r
- if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):\r
- ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)\r
- EdkLogger.error("build",\r
- PARAMETER_INVALID,\r
- ExtraData=ErrMsg,\r
- File=str(self.MetaFile))\r
- RetVal += IncPathList\r
- return RetVal\r
-\r
    ## Return a list of files which can be built from source
    #
    # What kind of files can be built is determined by build rules in
    # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
    #
    @cached_property
    def SourceFileList(self):
        RetVal = []
        # A source entry is taken only if its TagName / ToolChainFamily match
        # the toolchain in use; empty or '*' entries match anything.
        ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
        ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
        for F in self.Module.Sources:
            # match tool chain
            if F.TagName not in ToolChainTagSet:
                EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
                continue
            # match tool chain family or build rule family
            if F.ToolChainFamily not in ToolChainFamilySet:
                EdkLogger.debug(
                        EdkLogger.DEBUG_0,
                        "The file [%s] must be built by tools of [%s], " \
                        "but current toolchain family is [%s], buildrule family is [%s]" \
                            % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
                continue

            # add the file path into search path list for file including
            if F.Dir not in self.IncludePathList:
                self.IncludePathList.insert(0, F.Dir)
            RetVal.append(F)

        # When several extensions can produce the same base name, keep only
        # the one preferred by BuildRuleOrder.
        self._MatchBuildRuleOrder(RetVal)

        for F in RetVal:
            self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return RetVal
-\r
    ## Filter FileList in place so that, for any base name buildable from
    #  several extensions listed in BuildRuleOrder, only the highest-priority
    #  extension survives. Returns the (mutated) list.
    def _MatchBuildRuleOrder(self, FileList):
        Order_Dict = {}
        # accessing BuildOption populates self.BuildRuleOrder as a side effect
        self.BuildOption
        for SingleFile in FileList:
            if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
                # group files by path-without-extension
                key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]
                if key in Order_Dict:
                    Order_Dict[key].append(SingleFile.Ext)
                else:
                    Order_Dict[key] = [SingleFile.Ext]

        RemoveList = []
        for F in Order_Dict:
            if len(Order_Dict[F]) > 1:
                # lowest BuildRuleOrder index = highest priority; drop the rest
                Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
                for Ext in Order_Dict[F][1:]:
                    RemoveList.append(F + Ext)

        for item in RemoveList:
            # NOTE(review): item is a path string while FileList holds file
            # objects; removal relies on the file objects comparing equal to
            # their path strings — confirm against the path class's __eq__.
            FileList.remove(item)

        return FileList
-\r
    ## Return the list of unicode (.uni) files found by the build rules
    @cached_property
    def UnicodeFileList(self):
        return self.FileTypes.get(TAB_UNICODE_FILE,[])

    ## Return the list of vfr files
    @cached_property
    def VfrFileList(self):
        return self.FileTypes.get(TAB_VFR_FILE, [])

    ## Return the list of Image Definition files
    @cached_property
    def IdfFileList(self):
        return self.FileTypes.get(TAB_IMAGE_FILE,[])
-\r
    ## Return a list of files which can be built from binary
    #
    # "Build" binary files are just to copy them to build directory.
    #
    # @retval list The list of files which can be built later
    #
    @cached_property
    def BinaryFileList(self):
        RetVal = []
        for F in self.Module.Binaries:
            # skip binaries bound to a different build target
            if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:
                continue
            RetVal.append(F)
            # pass RetVal in explicitly to avoid recursing back into this property
            self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
        return RetVal
-\r
    ## Map file type and source extension to the instantiated build rule object.
    #  Lookup order: (BuildType, BuildRuleFamily), (ModuleType, BuildRuleFamily),
    #  (BuildType, ToolChainFamily), (ModuleType, ToolChainFamily).
    @cached_property
    def BuildRules(self):
        RetVal = {}
        BuildRuleDatabase = BuildRule
        for Type in BuildRuleDatabase.FileTypeList:
            #first try getting build rule by BuildRuleFamily
            RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
            if not RuleObject:
                # build type is always module type, but ...
                if self.ModuleType != self.BuildType:
                    RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
            #second try getting build rule by ToolChainFamily
            if not RuleObject:
                RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
                if not RuleObject:
                    # build type is always module type, but ...
                    if self.ModuleType != self.BuildType:
                        RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
            if not RuleObject:
                continue
            # bind the module's macros ($(MODULE_NAME), dirs, ...) into the rule
            RuleObject = RuleObject.Instantiate(self.Macros)
            RetVal[Type] = RuleObject
            # also index the rule by each source extension it accepts
            for Ext in RuleObject.SourceFileExtList:
                RetVal[Ext] = RuleObject
        return RetVal
-\r
    ## Chain build rules over File until no rule applies, accumulating the
    #  intermediate/final build targets in self._BuildTargets and friends.
    #
    # @param File           The file to apply rules to
    # @param FileType       Initial file type used for the first rule lookup
    # @param BinaryFileList Binary list to use instead of self.BinaryFileList
    #                       (passed by the BinaryFileList property itself)
    def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
        # lazily create the target containers on first use
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        if not BinaryFileList:
            BinaryFileList = self.BinaryFileList

        SubDirectory = os.path.join(self.OutputDir, File.SubDir)
        if not os.path.exists(SubDirectory):
            CreateDirectory(SubDirectory)
        LastTarget = None
        RuleChain = set()
        # SourceList grows as each rule's outputs become the next rule's inputs
        SourceList = [File]
        Index = 0
        #
        # Make sure to get build rule order value
        #
        self.BuildOption

        while Index < len(SourceList):
            Source = SourceList[Index]
            Index = Index + 1

            if Source != File:
                CreateDirectory(Source.Dir)

            if File.IsBinary and File == Source and File in BinaryFileList:
                # Skip all files that are not binary libraries
                if not self.IsLibrary:
                    continue
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            elif FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            else:
                # stop at no more rules
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            FileType = RuleObject.SourceFileType
            self._FileTypes[FileType].add(Source)

            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            Target = RuleObject.Apply(Source, self.BuildRuleOrder)
            if not Target:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break
            elif not Target.Outputs:
                # Only do build for target with outputs
                self._FinalBuildTargetList.add(Target)

            self._BuildTargets[FileType].add(Target)

            if not Source.IsBinary and Source == File:
                self._IntroBuildTargetList.add(Target)

            # to avoid cyclic rule
            if FileType in RuleChain:
                break

            RuleChain.add(FileType)
            SourceList.extend(Target.Outputs)
            LastTarget = Target
            # subsequent iterations must look rules up by extension, not type
            FileType = TAB_UNKNOWN_FILE
-\r
    ## All build targets grouped by file type. The dict is populated as a
    #  side effect of _ApplyBuildRule(), triggered here via the source and
    #  binary file-list properties.
    @cached_property
    def Targets(self):
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        #TRICK: call SourceFileList property to apply build rule for source files
        self.SourceFileList

        #TRICK: call _GetBinaryFileList to apply build rule for binary files
        self.BinaryFileList

        return self._BuildTargets

    ## Targets produced directly from the module's own files
    @cached_property
    def IntroTargetList(self):
        self.Targets
        return self._IntroBuildTargetList

    ## Final (leaf) targets at the end of each rule chain
    @cached_property
    def CodaTargetList(self):
        self.Targets
        return self._FinalBuildTargetList

    ## Source files grouped by detected file type
    @cached_property
    def FileTypes(self):
        self.Targets
        return self._FileTypes
-\r
    ## Get the list of package object the module depends on
    #
    # @retval list The package object list
    #
    @cached_property
    def DependentPackageList(self):
        return self.Module.Packages
-\r
    ## Return the list of auto-generated code file
    #
    # Runs GenC.CreateCode and maps each generated file (AutoGen.c/.h,
    # string/IDF headers and binary packs) to its content; each file is also
    # fed through the build rules.
    #
    # @retval list The list of auto-generated file
    #
    @cached_property
    def AutoGenFileList(self):
        # UEFI_HII modules get their UNI/IDF data from the resource section instead
        AutoGenUniIdf = self.BuildType != 'UEFI_HII'
        UniStringBinBuffer = BytesIO()
        IdfGenBinBuffer = BytesIO()
        RetVal = {}
        AutoGenC = TemplateString()
        AutoGenH = TemplateString()
        StringH = TemplateString()
        StringIdf = TemplateString()
        GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
        #
        # AutoGen.c is generated if there are library classes in inf, or there are object files
        #
        if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
                                    or TAB_OBJECT_FILE in self.FileTypes):
            AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenC)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(AutoGenH) != "":
            AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(StringH) != "":
            AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        # packed unicode string data goes to OUTPUT (it is a binary artifact)
        if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = UniStringBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if UniStringBinBuffer is not None:
            UniStringBinBuffer.close()
        if str(StringIdf) != "":
            AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringIdf)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if IdfGenBinBuffer is not None:
            IdfGenBinBuffer.close()
        return RetVal
-\r
    ## Return the list of library modules explicitly or implicitly used by this module
    @cached_property
    def DependentLibraryList(self):
        # only merge library classes and PCD for non-library module
        if self.IsLibrary:
            return []
        return self.PlatformInfo.ApplyLibraryInstance(self.Module)
-\r
    ## Get the list of PCDs from current module
    #
    # @retval list The list of PCD
    #
    @cached_property
    def ModulePcdList(self):
        # apply PCD settings from platform; also collect the INF usage comments
        RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
        ExtendCopyDictionaryLists(self._PcdComments, self.Module.PcdComments)
        return RetVal
-\r
    ## Get the list of PCDs from dependent libraries
    #
    # @retval list The list of PCD
    #
    @cached_property
    def LibraryPcdList(self):
        if self.IsLibrary:
            return []
        RetVal = []
        Pcds = set()
        # get PCDs from dependent libraries
        for Library in self.DependentLibraryList:
            PcdsInLibrary = OrderedDict()
            ExtendCopyDictionaryLists(self._PcdComments, Library.PcdComments)
            for Key in Library.Pcds:
                # skip duplicated PCDs: the module's own setting wins, and the
                # first library to declare a PCD wins over later ones
                if Key in self.Module.Pcds or Key in Pcds:
                    continue
                Pcds.add(Key)
                # shallow-copy so the platform settings do not mutate the library's object
                PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
            RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))
        return RetVal
-\r
- ## Get the GUID value mapping\r
- #\r
- # @retval dict The mapping between GUID cname and its value\r
- #\r
- @cached_property\r
- def GuidList(self):\r
- RetVal = OrderedDict(self.Module.Guids)\r
- for Library in self.DependentLibraryList:\r
- RetVal.update(Library.Guids)\r
- ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)\r
- ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)\r
- return RetVal\r
-\r
- @cached_property\r
- def GetGuidsUsedByPcd(self):\r
- RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())\r
- for Library in self.DependentLibraryList:\r
- RetVal.update(Library.GetGuidsUsedByPcd())\r
- return RetVal\r
- ## Get the protocol value mapping\r
- #\r
- # @retval dict The mapping between protocol cname and its value\r
- #\r
- @cached_property\r
- def ProtocolList(self):\r
- RetVal = OrderedDict(self.Module.Protocols)\r
- for Library in self.DependentLibraryList:\r
- RetVal.update(Library.Protocols)\r
- ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)\r
- ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)\r
- return RetVal\r
-\r
- ## Get the PPI value mapping\r
- #\r
- # @retval dict The mapping between PPI cname and its value\r
- #\r
- @cached_property\r
- def PpiList(self):\r
- RetVal = OrderedDict(self.Module.Ppis)\r
- for Library in self.DependentLibraryList:\r
- RetVal.update(Library.Ppis)\r
- ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)\r
- ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)\r
- return RetVal\r
-\r
- ## Get the list of include search path\r
- #\r
- # @retval list The list path\r
- #\r
- @cached_property\r
- def IncludePathList(self):\r
- RetVal = []\r
- RetVal.append(self.MetaFile.Dir)\r
- RetVal.append(self.DebugDir)\r
-\r
- for Package in self.Module.Packages:\r
- PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
- if PackageDir not in RetVal:\r
- RetVal.append(PackageDir)\r
- IncludesList = Package.Includes\r
- if Package._PrivateIncludes:\r
- if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):\r
- IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))\r
- for Inc in IncludesList:\r
- if Inc not in RetVal:\r
- RetVal.append(str(Inc))\r
- return RetVal\r
-\r
- @cached_property\r
- def IncludePathLength(self):\r
- return sum(len(inc)+1 for inc in self.IncludePathList)\r
-\r
- ## Get HII EX PCDs which maybe used by VFR\r
- #\r
- # efivarstore used by VFR may relate with HII EX PCDs\r
- # Get the variable name and GUID from efivarstore and HII EX PCD\r
- # List the HII EX PCDs in As Built INF if both name and GUID match.\r
- #\r
- # @retval list HII EX PCDs\r
- #\r
- def _GetPcdsMaybeUsedByVfr(self):\r
- if not self.SourceFileList:\r
- return []\r
-\r
- NameGuids = set()\r
- for SrcFile in self.SourceFileList:\r
- if SrcFile.Ext.lower() != '.vfr':\r
- continue\r
- Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')\r
- if not os.path.exists(Vfri):\r
- continue\r
- VfriFile = open(Vfri, 'r')\r
- Content = VfriFile.read()\r
- VfriFile.close()\r
- Pos = Content.find('efivarstore')\r
- while Pos != -1:\r
- #\r
- # Make sure 'efivarstore' is the start of efivarstore statement\r
- # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'\r
- #\r
- Index = Pos - 1\r
- while Index >= 0 and Content[Index] in ' \t\r\n':\r
- Index -= 1\r
- if Index >= 0 and Content[Index] != ';':\r
- Pos = Content.find('efivarstore', Pos + len('efivarstore'))\r
- continue\r
- #\r
- # 'efivarstore' must be followed by name and guid\r
- #\r
- Name = gEfiVarStoreNamePattern.search(Content, Pos)\r
- if not Name:\r
- break\r
- Guid = gEfiVarStoreGuidPattern.search(Content, Pos)\r
- if not Guid:\r
- break\r
- NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')\r
- NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))\r
- Pos = Content.find('efivarstore', Name.end())\r
- if not NameGuids:\r
- return []\r
- HiiExPcds = []\r
- for Pcd in self.PlatformInfo.Platform.Pcds.values():\r
- if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:\r
- continue\r
- for SkuInfo in Pcd.SkuInfoList.values():\r
- Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)\r
- if not Value:\r
- continue\r
- Name = _ConvertStringToByteArray(SkuInfo.VariableName)\r
- Guid = GuidStructureStringToGuidString(Value)\r
- if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:\r
- HiiExPcds.append(Pcd)\r
- break\r
-\r
- return HiiExPcds\r
-\r
- def _GenOffsetBin(self):\r
- VfrUniBaseName = {}\r
- for SourceFile in self.Module.Sources:\r
- if SourceFile.Type.upper() == ".VFR" :\r
- #\r
- # search the .map file to find the offset of vfr binary in the PE32+/TE file.\r
- #\r
- VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")\r
- elif SourceFile.Type.upper() == ".UNI" :\r
- #\r
- # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.\r
- #\r
- VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")\r
-\r
- if not VfrUniBaseName:\r
- return None\r
- MapFileName = os.path.join(self.OutputDir, self.Name + ".map")\r
- EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")\r
- VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))\r
- if not VfrUniOffsetList:\r
- return None\r
-\r
- OutputName = '%sOffset.bin' % self.Name\r
- UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)\r
-\r
- try:\r
- fInputfile = open(UniVfrOffsetFileName, "wb+", 0)\r
- except:\r
- EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)\r
-\r
- # Use a instance of BytesIO to cache data\r
- fStringIO = BytesIO()\r
-\r
- for Item in VfrUniOffsetList:\r
- if (Item[0].find("Strings") != -1):\r
- #\r
- # UNI offset in image.\r
- # GUID + Offset\r
- # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r
- #\r
- UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'\r
- fStringIO.write(UniGuid)\r
- UniValue = pack ('Q', int (Item[1], 16))\r
- fStringIO.write (UniValue)\r
- else:\r
- #\r
- # VFR binary offset in image.\r
- # GUID + Offset\r
- # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r
- #\r
- VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'\r
- fStringIO.write(VfrGuid)\r
- VfrValue = pack ('Q', int (Item[1], 16))\r
- fStringIO.write (VfrValue)\r
- #\r
- # write data into file.\r
- #\r
- try :\r
- fInputfile.write (fStringIO.getvalue())\r
- except:\r
- EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "\r
- "file been locked or using by other applications." %UniVfrOffsetFileName, None)\r
-\r
- fStringIO.close ()\r
- fInputfile.close ()\r
- return OutputName\r
-\r
- @cached_property\r
- def OutputFile(self):\r
- retVal = set()\r
- OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r
- DebugDir = self.DebugDir.replace('\\', '/').strip('/')\r
- for Item in self.CodaTargetList:\r
- File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')\r
- retVal.add(File)\r
- if self.DepexGenerated:\r
- retVal.add(self.Name + '.depex')\r
-\r
- Bin = self._GenOffsetBin()\r
- if Bin:\r
- retVal.add(Bin)\r
-\r
- for Root, Dirs, Files in os.walk(OutputDir):\r
- for File in Files:\r
- if File.lower().endswith('.pdb'):\r
- retVal.add(File)\r
-\r
- return retVal\r
-\r
    ## Create AsBuilt INF file the module
    #
    def CreateAsBuiltInf(self):
        # Generates <ModuleName>.inf in the output directory describing the
        # module "as built": binaries, packages, PCDs, protocols, PPIs,
        # GUIDs, build flags, user extensions and depex.

        if self.IsAsBuiltInfCreated:
            return

        # Skip INF file generation for libraries
        if self.IsLibrary:
            return

        # Skip the following code for modules with no source files
        if not self.SourceFileList:
            return

        # Skip the following code for modules without any binary files
        if self.BinaryFileList:
            return

        ### TODO: How to handles mixed source and binary modules

        # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
        # Also find all packages that the DynamicEx PCDs depend on
        Pcds = []
        PatchablePcds = []
        Packages = []
        PcdCheckList = []
        PcdTokenSpaceList = []
        for Pcd in self.ModulePcdList + self.LibraryPcdList:
            if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
                PatchablePcds.append(Pcd)
                PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
            elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
                if Pcd not in Pcds:
                    Pcds.append(Pcd)
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
                    PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
        GuidList = OrderedDict(self.GuidList)
        for TokenSpace in self.GetGuidsUsedByPcd:
            # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
            # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
            if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
                GuidList.pop(TokenSpace)
        # Keep only those packages that declare at least one of the GUIDs,
        # PPIs, protocols or PCDs actually used above.
        CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
        for Package in self.DerivedPackageList:
            if Package in Packages:
                continue
            BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
            Found = False
            for Index in range(len(BeChecked)):
                for Item in CheckList[Index]:
                    if Item in BeChecked[Index]:
                        Packages.append(Package)
                        Found = True
                        break
                if Found:
                    break

        VfrPcds = self._GetPcdsMaybeUsedByVfr()
        for Pkg in self.PlatformInfo.PackageList:
            if Pkg in Packages:
                continue
            for VfrPcd in VfrPcds:
                if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
                    (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
                    Packages.append(Pkg)
                    break

        # A UEFI driver with a generated depex is reported as DXE_DRIVER.
        ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
        DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
        Guid = self.Guid
        MDefs = self.Module.Defines

        # Template substitution dictionary for gAsBuiltInfHeaderString.
        AsBuiltInfDict = {
            'module_name'                       : self.Name,
            'module_guid'                       : Guid,
            'module_module_type'                : ModuleType,
            'module_version_string'             : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
            'pcd_is_driver_string'              : [],
            'module_uefi_specification_version' : [],
            'module_pi_specification_version'   : [],
            'module_entry_point'                : self.Module.ModuleEntryPointList,
            'module_unload_image'               : self.Module.ModuleUnloadImageList,
            'module_constructor'                : self.Module.ConstructorList,
            'module_destructor'                 : self.Module.DestructorList,
            'module_shadow'                     : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
            'module_pci_vendor_id'              : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
            'module_pci_device_id'              : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
            'module_pci_class_code'             : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
            'module_pci_revision'               : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
            'module_build_number'               : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
            'module_spec'                       : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
            'module_uefi_hii_resource_section'  : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
            'module_uni_file'                   : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
            'module_arch'                       : self.Arch,
            'package_item'                      : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
            'binary_item'                       : [],
            'patchablepcd_item'                 : [],
            'pcd_item'                          : [],
            'protocol_item'                     : [],
            'ppi_item'                          : [],
            'guid_item'                         : [],
            'flags_item'                        : [],
            'libraryclasses_item'               : []
        }

        if 'MODULE_UNI_FILE' in MDefs:
            UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
            if os.path.isfile(UNIFile):
                shutil.copy2(UNIFile, self.OutputDir)

        if self.AutoGenVersion > int(gInfSpecVersion, 0):
            AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
        else:
            AsBuiltInfDict['module_inf_version'] = gInfSpecVersion

        if DriverType:
            AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)

        if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
        if 'PI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])

        # Classify each final build target into the proper [Binaries] entry.
        OutputDir = self.OutputDir.replace('\\', '/').strip('/')
        DebugDir = self.DebugDir.replace('\\', '/').strip('/')
        for Item in self.CodaTargetList:
            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
            if os.path.isabs(File):
                File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
            if Item.Target.Ext.lower() == '.aml':
                AsBuiltInfDict['binary_item'].append('ASL|' + File)
            elif Item.Target.Ext.lower() == '.acpi':
                AsBuiltInfDict['binary_item'].append('ACPI|' + File)
            elif Item.Target.Ext.lower() == '.efi':
                AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
            else:
                AsBuiltInfDict['binary_item'].append('BIN|' + File)
        if not self.DepexGenerated:
            DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
            if os.path.exists(DepexFile):
                self.DepexGenerated = True
        if self.DepexGenerated:
            if self.ModuleType in [SUP_MODULE_PEIM]:
                AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
                AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
                AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')

        Bin = self._GenOffsetBin()
        if Bin:
            AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)

        for Root, Dirs, Files in os.walk(OutputDir):
            for File in Files:
                if File.lower().endswith('.pdb'):
                    AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
        # Start the header comments at the '@BinaryHeader' tag when present,
        # rewriting it to '@file' for the as-built INF.
        HeaderComments = self.Module.HeaderComments
        StartPos = 0
        for Index in range(len(HeaderComments)):
            if HeaderComments[Index].find('@BinaryHeader') != -1:
                HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
                StartPos = Index
                break
        AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
        AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)

        GenList = [
            (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
            (self.PpiList, self._PpiComments, 'ppi_item'),
            (GuidList, self._GuidComments, 'guid_item')
        ]
        for Item in GenList:
            for CName in Item[0]:
                Comments = '\n '.join(Item[1][CName]) if CName in Item[1] else ''
                Entry = Comments + '\n ' + CName if Comments else CName
                AsBuiltInfDict[Item[2]].append(Entry)
        # Patchable PCDs: look up each token's offset in the map file and
        # normalize its default value into the INF representation.
        PatchList = parsePcdInfoFromMapFile(
                            os.path.join(self.OutputDir, self.Name + '.map'),
                            os.path.join(self.OutputDir, self.Name + '.efi')
                        )
        if PatchList:
            for Pcd in PatchablePcds:
                TokenCName = Pcd.TokenCName
                for PcdItem in GlobalData.MixedPcd:
                    if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                        TokenCName = PcdItem[0]
                        break
                # for-else: skip PCDs that do not appear in the map file.
                for PatchPcd in PatchList:
                    if TokenCName == PatchPcd[0]:
                        break
                else:
                    continue
                PcdValue = ''
                if Pcd.DatumType == 'BOOLEAN':
                    BoolValue = Pcd.DefaultValue.upper()
                    if BoolValue == 'TRUE':
                        Pcd.DefaultValue = '1'
                    elif BoolValue == 'FALSE':
                        Pcd.DefaultValue = '0'

                if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
                    HexFormat = '0x%02x'
                    if Pcd.DatumType == TAB_UINT16:
                        HexFormat = '0x%04x'
                    elif Pcd.DatumType == TAB_UINT32:
                        HexFormat = '0x%08x'
                    elif Pcd.DatumType == TAB_UINT64:
                        HexFormat = '0x%016x'
                    PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
                else:
                    if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
                        EdkLogger.error("build", AUTOGEN_ERROR,
                                        "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
                                        )
                    ArraySize = int(Pcd.MaxDatumSize, 0)
                    PcdValue = Pcd.DefaultValue
                    if PcdValue[0] != '{':
                        Unicode = False
                        if PcdValue[0] == 'L':
                            Unicode = True
                        PcdValue = PcdValue.lstrip('L')
                        # NOTE(review): eval() on PCD default values from build
                        # metadata — input is build-local, but still worth
                        # confirming it cannot carry untrusted content.
                        PcdValue = eval(PcdValue)
                        NewValue = '{'
                        for Index in range(0, len(PcdValue)):
                            if Unicode:
                                CharVal = ord(PcdValue[Index])
                                NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
                                        + '0x%02x' % (CharVal >> 8) + ', '
                            else:
                                NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
                        Padding = '0x00, '
                        if Unicode:
                            Padding = Padding * 2
                            ArraySize = ArraySize // 2
                        if ArraySize < (len(PcdValue) + 1):
                            if Pcd.MaxSizeUserSet:
                                EdkLogger.error("build", AUTOGEN_ERROR,
                                            "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                            )
                            else:
                                ArraySize = len(PcdValue) + 1
                        if ArraySize > len(PcdValue) + 1:
                            NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
                        PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
                    elif len(PcdValue.split(',')) <= ArraySize:
                        PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
                        PcdValue += '}'
                    else:
                        if Pcd.MaxSizeUserSet:
                            EdkLogger.error("build", AUTOGEN_ERROR,
                                        "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                        )
                        else:
                            ArraySize = len(PcdValue) + 1
                PcdItem = '%s.%s|%s|0x%X' % \
                    (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
                PcdComments = ''
                if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                    PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
                if PcdComments:
                    PcdItem = PcdComments + '\n ' + PcdItem
                AsBuiltInfDict['patchablepcd_item'].append(PcdItem)

        # DynamicEx and VFR-referenced PCDs, annotated with HII variable info.
        for Pcd in Pcds + VfrPcds:
            PcdCommentList = []
            HiiInfo = ''
            TokenCName = Pcd.TokenCName
            for PcdItem in GlobalData.MixedPcd:
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                    TokenCName = PcdItem[0]
                    break
            if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
                for SkuName in Pcd.SkuInfoList:
                    SkuInfo = Pcd.SkuInfoList[SkuName]
                    HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
                    break
            if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
            if HiiInfo:
                UsageIndex = -1
                UsageStr = ''
                for Index, Comment in enumerate(PcdCommentList):
                    for Usage in UsageList:
                        if Comment.find(Usage) != -1:
                            UsageStr = Usage
                            UsageIndex = Index
                            break
                if UsageIndex != -1:
                    PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
                else:
                    PcdCommentList.append('## UNDEFINED ' + HiiInfo)
            PcdComments = '\n '.join(PcdCommentList)
            PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
            if PcdComments:
                PcdEntry = PcdComments + '\n ' + PcdEntry
            AsBuiltInfDict['pcd_item'].append(PcdEntry)
        for Item in self.BuildOption:
            if 'FLAGS' in self.BuildOption[Item]:
                AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))

        # Generated LibraryClasses section in comments.
        for Library in self.LibraryAutoGenList:
            AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))

        # Generated UserExtensions TianoCore section.
        # All tianocore user extensions are copied.
        UserExtStr = ''
        for TianoCore in self._GetTianoCoreUserExtensionList():
            UserExtStr += '\n'.join(TianoCore)
            ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
            if os.path.isfile(ExtensionFile):
                shutil.copy2(ExtensionFile, self.OutputDir)
        AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr

        # Generated depex expression section in comments.
        DepexExpression = self._GetDepexExpresionString()
        AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''

        AsBuiltInf = TemplateString()
        AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))

        SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)

        self.IsAsBuiltInfCreated = True
-\r
    ## Copy this module's hash file, as-built INF and output files into the
    #  binary cache destination directory (GlobalData.gBinCacheDest).
    def CopyModuleToCache(self):
        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
        CreateDirectory (FileDir)
        HashFile = path.join(self.BuildDir, self.Name + '.hash')
        if os.path.exists(HashFile):
            CopyFileOnChange(HashFile, FileDir)
        ModuleFile = path.join(self.OutputDir, self.Name + '.inf')
        if os.path.exists(ModuleFile):
            CopyFileOnChange(ModuleFile, FileDir)

        # Fall back to the build database's binary list when no output files
        # were recorded for this module.
        if not self.OutputFile:
            Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
            self.OutputFile = Ma.Binaries

        for File in self.OutputFile:
            File = str(File)
            if not os.path.isabs(File):
                File = os.path.join(self.OutputDir, File)
            if os.path.exists(File):
                # Preserve the file's relative layout under the cache dir.
                sub_dir = os.path.relpath(File, self.OutputDir)
                destination_file = os.path.join(FileDir, sub_dir)
                destination_dir = os.path.dirname(destination_file)
                CreateDirectory(destination_dir)
                CopyFileOnChange(File, destination_dir)
-\r
- def AttemptModuleCacheCopy(self):\r
- # If library or Module is binary do not skip by hash\r
- if self.IsBinaryModule:\r
- return False\r
- # .inc is contains binary information so do not skip by hash as well\r
- for f_ext in self.SourceFileList:\r
- if '.inc' in str(f_ext):\r
- return False\r
- FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
- HashFile = path.join(FileDir, self.Name + '.hash')\r
- if os.path.exists(HashFile):\r
- f = open(HashFile, 'r')\r
- CacheHash = f.read()\r
- f.close()\r
- self.GenModuleHash()\r
- if GlobalData.gModuleHash[self.Arch][self.Name]:\r
- if CacheHash == GlobalData.gModuleHash[self.Arch][self.Name]:\r
- for root, dir, files in os.walk(FileDir):\r
- for f in files:\r
- if self.Name + '.hash' in f:\r
- CopyFileOnChange(HashFile, self.BuildDir)\r
- else:\r
- File = path.join(root, f)\r
- sub_dir = os.path.relpath(File, FileDir)\r
- destination_file = os.path.join(self.OutputDir, sub_dir)\r
- destination_dir = os.path.dirname(destination_file)\r
- CreateDirectory(destination_dir)\r
- CopyFileOnChange(File, destination_dir)\r
- if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
- CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
- return True\r
- return False\r
-\r
    ## Create makefile for the module and its dependent libraries
    #
    #   @param      CreateLibraryMakeFile   Flag indicating if or not the makefiles of
    #                                       dependent libraries will be created
    #
    @cached_class_function
    def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):
        # nest this function inside its only caller.
        def CreateTimeStamp():
            # Record every file that, if changed, invalidates this module's
            # build: its INF, sources, library INFs and autogen dependencies.
            FileSet = {self.MetaFile.Path}

            for SourceFile in self.Module.Sources:
                FileSet.add (SourceFile.Path)

            for Lib in self.DependentLibraryList:
                FileSet.add (Lib.MetaFile.Path)

            for f in self.AutoGenDepSet:
                FileSet.add (f.Path)

            if os.path.exists (self.TimeStampPath):
                os.remove (self.TimeStampPath)
            with open(self.TimeStampPath, 'w+') as file:
                for f in FileSet:
                    print(f, file=file)

        # Ignore generating makefile when it is a binary module
        if self.IsBinaryModule:
            return

        self.GenFfsList = GenFfsList
        # Recurse into dependent libraries first (non-library modules only).
        if not self.IsLibrary and CreateLibraryMakeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateMakeFile()

        # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping
        if not GlobalData.gUseHashCache and self.CanSkip():
            return

        if len(self.CustomMakefile) == 0:
            Makefile = GenMake.ModuleMakefile(self)
        else:
            Makefile = GenMake.CustomMakefile(self)
        if Makefile.Generate():
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
                            (self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
                            (self.Name, self.Arch))

        CreateTimeStamp()
-\r
- def CopyBinaryFiles(self):\r
- for File in self.Module.Binaries:\r
- SrcPath = File.Path\r
- DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))\r
- CopyLongFilePath(SrcPath, DstPath)\r
    ## Create autogen code for the module and its dependent libraries
    #
    #   @param      CreateLibraryCodeFile   Flag indicating if or not the code of
    #                                       dependent libraries will be created
    #
    def CreateCodeFile(self, CreateLibraryCodeFile=True):
        # Only generate once per ModuleAutoGen instance.
        if self.IsCodeFileCreated:
            return

        # Need to generate PcdDatabase even PcdDriver is binarymodule
        if self.IsBinaryModule and self.PcdIsDriver != '':
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
            return
        if self.IsBinaryModule:
            # Binary library modules only need their binaries copied.
            if self.IsLibrary:
                self.CopyBinaryFiles()
            return

        if not self.IsLibrary and CreateLibraryCodeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateCodeFile()

        # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping
        if not GlobalData.gUseHashCache and self.CanSkip():
            return

        # Track which autogen files were (re)generated vs. left unchanged.
        AutoGenList = []
        IgoredAutoGenList = []

        for File in self.AutoGenFileList:
            if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
                AutoGenList.append(str(File))
            else:
                IgoredAutoGenList.append(str(File))


        for ModuleType in self.DepexList:
            # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
            if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
                continue

            Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
            DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}

            # A non-empty postfix notation means a .depex file will exist.
            if len(Dpx.PostfixNotation) != 0:
                self.DepexGenerated = True

            if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
                AutoGenList.append(str(DpxFile))
            else:
                IgoredAutoGenList.append(str(DpxFile))

        if IgoredAutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
                            (" ".join(AutoGenList), self.Name, self.Arch))
        elif AutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
                            (" ".join(IgoredAutoGenList), self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
                            (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))

        self.IsCodeFileCreated = True
        return AutoGenList
-\r
    ## Summarize the ModuleAutoGen objects of all libraries used by this module
    @cached_property
    def LibraryAutoGenList(self):
        # Build one ModuleAutoGen per dependent library (deduplicated) and
        # register each new library's final build targets with this module's
        # build rules so they are picked up when linking.
        RetVal = []
        for Library in self.DependentLibraryList:
            La = ModuleAutoGen(
                    self.Workspace,
                    Library.MetaFile,
                    self.BuildTarget,
                    self.ToolChain,
                    self.Arch,
                    self.PlatformInfo.MetaFile
                    )
            if La not in RetVal:
                RetVal.append(La)
                for Lib in La.CodaTargetList:
                    self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
        return RetVal
-\r
- def GenModuleHash(self):\r
- # Initialize a dictionary for each arch type\r
- if self.Arch not in GlobalData.gModuleHash:\r
- GlobalData.gModuleHash[self.Arch] = {}\r
-\r
- # Early exit if module or library has been hashed and is in memory\r
- if self.Name in GlobalData.gModuleHash[self.Arch]:\r
- return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
-\r
- # Initialze hash object\r
- m = hashlib.md5()\r
-\r
- # Add Platform level hash\r
- m.update(GlobalData.gPlatformHash.encode('utf-8'))\r
-\r
- # Add Package level hash\r
- if self.DependentPackageList:\r
- for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
- if Pkg.PackageName in GlobalData.gPackageHash:\r
- m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))\r
-\r
- # Add Library hash\r
- if self.LibraryAutoGenList:\r
- for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
- if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r
- Lib.GenModuleHash()\r
- m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
-\r
- # Add Module self\r
- f = open(str(self.MetaFile), 'rb')\r
- Content = f.read()\r
- f.close()\r
- m.update(Content)\r
-\r
- # Add Module's source files\r
- if self.SourceFileList:\r
- for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r
- f = open(str(File), 'rb')\r
- Content = f.read()\r
- f.close()\r
- m.update(Content)\r
-\r
- GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()\r
-\r
- return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
-\r
- ## Decide whether we can skip the ModuleAutoGen process\r
- def CanSkipbyHash(self):\r
- # Hashing feature is off\r
- if not GlobalData.gUseHashCache:\r
- return False\r
-\r
- # Initialize a dictionary for each arch type\r
- if self.Arch not in GlobalData.gBuildHashSkipTracking:\r
- GlobalData.gBuildHashSkipTracking[self.Arch] = dict()\r
-\r
- # If library or Module is binary do not skip by hash\r
- if self.IsBinaryModule:\r
- return False\r
-\r
- # .inc is contains binary information so do not skip by hash as well\r
- for f_ext in self.SourceFileList:\r
- if '.inc' in str(f_ext):\r
- return False\r
-\r
- # Use Cache, if exists and if Module has a copy in cache\r
- if GlobalData.gBinCacheSource and self.AttemptModuleCacheCopy():\r
- return True\r
-\r
- # Early exit for libraries that haven't yet finished building\r
- HashFile = path.join(self.BuildDir, self.Name + ".hash")\r
- if self.IsLibrary and not os.path.exists(HashFile):\r
- return False\r
-\r
- # Return a Boolean based on if can skip by hash, either from memory or from IO.\r
- if self.Name not in GlobalData.gBuildHashSkipTracking[self.Arch]:\r
- # If hashes are the same, SaveFileOnChange() will return False.\r
- GlobalData.gBuildHashSkipTracking[self.Arch][self.Name] = not SaveFileOnChange(HashFile, self.GenModuleHash(), True)\r
- return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]\r
- else:\r
- return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]\r
-\r
- ## Decide whether we can skip the ModuleAutoGen process\r
- # If any source file is newer than the module than we cannot skip\r
- #\r
- def CanSkip(self):\r
- if self.MakeFileDir in GlobalData.gSikpAutoGenCache:\r
- return True\r
- if not os.path.exists(self.TimeStampPath):\r
- return False\r
- #last creation time of the module\r
- DstTimeStamp = os.stat(self.TimeStampPath)[8]\r
-\r
- SrcTimeStamp = self.Workspace._SrcTimeStamp\r
- if SrcTimeStamp > DstTimeStamp:\r
- return False\r
-\r
- with open(self.TimeStampPath,'r') as f:\r
- for source in f:\r
- source = source.rstrip('\n')\r
- if not os.path.exists(source):\r
- return False\r
- if source not in ModuleAutoGen.TimeDict :\r
- ModuleAutoGen.TimeDict[source] = os.stat(source)[8]\r
- if ModuleAutoGen.TimeDict[source] > DstTimeStamp:\r
- return False\r
- GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)\r
- return True\r
-\r
- @cached_property\r
- def TimeStampPath(self):\r
- return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')\r
    ## Return the class-level object cache
    #
    # Exposes the private (name-mangled) __ObjectCache dictionary so callers
    # can inspect previously constructed instances of this class.
    #
    @classmethod
    def Cache(cls):
        return cls.__ObjectCache
+\r
#
# The priority list used while overriding build options.
# Each key is a 5-digit bitmask string ("0x" + one digit per field of a
# TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE key: 1 = concrete value,
# 0 = wildcard). A fully concrete key wins (16); a fully wildcarded one
# loses (1). Stars below mark the wildcarded field(s).
#
PrioList = {"0x11111" : 16,     #  TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
            "0x01111" : 15,     #  ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
            "0x10111" : 14,     #  TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
            "0x00111" : 13,     #  ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
            "0x11011" : 12,     #  TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
            "0x01011" : 11,     #  ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
            "0x10011" : 10,     #  TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
            "0x00011" : 9,      #  ******_*********_****_COMMANDTYPE_ATTRIBUTE
            "0x11101" : 8,      #  TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
            "0x01101" : 7,      #  ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
            "0x10101" : 6,      #  TARGET_*********_ARCH_***********_ATTRIBUTE
            "0x00101" : 5,      #  ******_*********_ARCH_***********_ATTRIBUTE
            "0x11001" : 4,      #  TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
            "0x01001" : 3,      #  ******_TOOLCHAIN_****_***********_ATTRIBUTE
            "0x10001" : 2,      #  TARGET_*********_****_***********_ATTRIBUTE
            "0x00001" : 1}      #  ******_*********_****_***********_ATTRIBUTE (Lowest)
## Calculate the priority value of a build option
#
#   @param  Key     Build option key of the form
#                   TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
#
#   @retval Value   Priority value looked up from PrioList: each wildcard
#                   (TAB_STAR) field clears its digit in the mask, and a more
#                   concrete key yields a higher priority.
#
def CalculatePriorityValue(Key):
    Target, ToolChain, Arch, CommandType, Attr = Key.split('_')
    PriorityValue = 0x11111
    # Each field that is a wildcard zeroes its own digit in the mask.
    FieldMasks = ((Target,      0x01111),
                  (ToolChain,   0x10111),
                  (Arch,        0x11011),
                  (CommandType, 0x11101),
                  (Attr,        0x11110))
    for Field, Mask in FieldMasks:
        if Field == TAB_STAR:
            PriorityValue &= Mask

    return PrioList["0x%0.5x" % PriorityValue]