]>
Commit | Line | Data |
---|---|---|
1 | ## @file\r | |
2 | # Generate AutoGen.h, AutoGen.c and *.depex files\r | |
3 | #\r | |
4 | # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>\r | |
5 | # Copyright (c) 2018, Hewlett Packard Enterprise Development, L.P.<BR>\r | |
6 | #\r | |
7 | # This program and the accompanying materials\r | |
8 | # are licensed and made available under the terms and conditions of the BSD License\r | |
9 | # which accompanies this distribution. The full text of the license may be found at\r | |
10 | # http://opensource.org/licenses/bsd-license.php\r | |
11 | #\r | |
12 | # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r | |
13 | # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r | |
14 | #\r | |
15 | \r | |
16 | ## Import Modules\r | |
17 | #\r | |
18 | from __future__ import print_function\r | |
19 | from __future__ import absolute_import\r | |
20 | import Common.LongFilePathOs as os\r | |
21 | import re\r | |
22 | import os.path as path\r | |
23 | import copy\r | |
24 | import uuid\r | |
25 | \r | |
26 | from . import GenC\r | |
27 | from . import GenMake\r | |
28 | from . import GenDepex\r | |
29 | from io import BytesIO\r | |
30 | \r | |
31 | from .StrGather import *\r | |
32 | from .BuildEngine import BuildRule\r | |
33 | \r | |
34 | from Common.LongFilePathSupport import CopyLongFilePath\r | |
35 | from Common.BuildToolError import *\r | |
36 | from Common.DataType import *\r | |
37 | from Common.Misc import *\r | |
38 | from Common.StringUtils import *\r | |
39 | import Common.GlobalData as GlobalData\r | |
40 | from GenFds.FdfParser import *\r | |
41 | from CommonDataClass.CommonClass import SkuInfoClass\r | |
42 | from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile\r | |
43 | import Common.VpdInfoFile as VpdInfoFile\r | |
44 | from .GenPcdDb import CreatePcdDatabaseCode\r | |
45 | from Workspace.MetaFileCommentParser import UsageList\r | |
46 | from Workspace.WorkspaceCommon import GetModuleLibInstances\r | |
47 | from Common.MultipleWorkspace import MultipleWorkspace as mws\r | |
48 | from . import InfSectionParser\r | |
49 | import datetime\r | |
50 | import hashlib\r | |
51 | from .GenVar import VariableMgr, var_info\r | |
52 | from collections import OrderedDict\r | |
53 | from collections import defaultdict\r | |
54 | from Workspace.WorkspaceCommon import OrderedListDict\r | |
55 | \r | |
56 | from Common.caching import cached_property, cached_class_function\r | |
57 | \r | |
## Regular expression for splitting a Dependency Expression string into tokens
#  (parentheses, words, or " <name>.inf" module references).
#  NOTE: all patterns below are raw strings; the previous non-raw literals
#  relied on invalid string escapes such as "\(" and "\w", which raise
#  DeprecationWarning today and will become a SyntaxError in future Python.
gDepexTokenPattern = re.compile(r"(\(|\)|\w+| \S+\.inf)")

## Regular expression for match: PCD(xxxx.yyy)
gPCDAsGuidPattern = re.compile(r"^PCD\(.+\..+\)$")

#
# Regular expression for finding Include Directories; the difference between MSFT and INTEL/GCC/RVCT
# is the former uses /I while the latter uses -I to specify include directories
#
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

#
# Match "name = variable" in an efivarstore statement
#
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
#
# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
#
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
80 | \r | |
## Mapping from tool-chain family to the flavor of Makefile to generate:
#  the MSFT family consumes nmake syntax, the GCC family consumes gmake syntax.
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}


## Build rule configuration file (located under the Conf directory)
gDefaultBuildRuleFile = 'build_rule.txt'

## Tools definition configuration file (located under the Conf directory)
gDefaultToolsDefFile = 'tools_def.txt'

## Build rule default version required by this tool
AutoGenReqBuildRuleVerNum = "0.1"

## Default file names for the generated AutoGen artifacts; the
#  %(module_name)s placeholders are substituted per module.
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
## INF specification version emitted into the generated as-built INF
gInfSpecVersion = "0x00010017"
103 | \r | |
#
# Template string used to generate the "As Built" INF for a module.
# TemplateString ${BEGIN}/${END} pairs delimit optional/repeated sections;
# the remaining ${...} placeholders are substituted per module.
#
gAsBuiltInfHeaderString = TemplateString("""${header_comments}

# DO NOT EDIT
# FILE auto-generated

[Defines]
INF_VERSION = ${module_inf_version}
BASE_NAME = ${module_name}
FILE_GUID = ${module_guid}
MODULE_TYPE = ${module_module_type}${BEGIN}
VERSION_STRING = ${module_version_string}${END}${BEGIN}
PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
DESTRUCTOR = ${module_destructor}${END}${BEGIN}
SHADOW = ${module_shadow}${END}${BEGIN}
PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
SPEC = ${module_spec}${END}${BEGIN}
UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
MODULE_UNI_FILE = ${module_uni_file}${END}

[Packages.${module_arch}]${BEGIN}
${package_item}${END}

[Binaries.${module_arch}]${BEGIN}
${binary_item}${END}

[PatchPcd.${module_arch}]${BEGIN}
${patchablepcd_item}
${END}

[Protocols.${module_arch}]${BEGIN}
${protocol_item}
${END}

[Ppis.${module_arch}]${BEGIN}
${ppi_item}
${END}

[Guids.${module_arch}]${BEGIN}
${guid_item}
${END}

[PcdEx.${module_arch}]${BEGIN}
${pcd_item}
${END}

[LibraryClasses.${module_arch}]
## @LIB_INSTANCES${BEGIN}
# ${libraryclasses_item}${END}

${depexsection_item}

${userextension_tianocore_item}

${tail_comments}

[BuildOptions.${module_arch}]
## @AsBuilt${BEGIN}
## ${flags_item}${END}
""")
175 | \r | |
## Base class for AutoGen
#
# This class just implements the cache mechanism of AutoGen objects.
#
class AutoGen(object):
    # database to maintain the objects in each child class
    # key: (BuildTarget, ToolChain, ARCH, platform meta-file) -> AutoGen object
    __ObjectCache = {}

    ## Factory method
    #
    #   @param  Class           class object of real AutoGen class
    #                           (WorkspaceAutoGen, ModuleAutoGen or PlatformAutoGen)
    #   @param  Workspace       Workspace directory or WorkspaceAutoGen object
    #   @param  MetaFile        The path of meta file
    #   @param  Target          Build target
    #   @param  Toolchain       Tool chain name
    #   @param  Arch            Target arch
    #   @param  *args           The specific class related parameters
    #   @param  **kwargs        The specific class related dict parameters
    #
    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # check if the object has been created
        Key = (Target, Toolchain, Arch, MetaFile)
        if Key in cls.__ObjectCache:
            # if it exists, just return it directly
            return cls.__ObjectCache[Key]
        # it didn't exist. create it, cache it, then return it
        RetVal = cls.__ObjectCache[Key] = super(AutoGen, cls).__new__(cls)
        return RetVal

    def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # BUGFIX: previously this forwarded an extra 'self' plus all the
        # constructor arguments to object.__init__(); because __init__ is
        # overridden, object.__init__() rejects excess arguments and raises
        # TypeError on Python 3.  The base initializer takes no arguments.
        super(AutoGen, self).__init__()

    ## hash() operator
    #
    #  The file path of platform file will be used to represent hash value of this object
    #
    #   @retval int     Hash value of the file path of platform file
    #
    def __hash__(self):
        return hash(self.MetaFile)

    ## str() operator
    #
    #  The file path of platform file will be used to represent this object
    #
    #   @retval string  String of platform file path
    #
    def __str__(self):
        return str(self.MetaFile)

    ## "==" operator
    #  NOTE: returns Other itself (falsy) when Other is empty/None; callers
    #  rely on this truthiness behavior, so it is kept as-is.
    def __eq__(self, Other):
        return Other and self.MetaFile == Other
230 | \r | |
231 | ## Workspace AutoGen class\r | |
232 | #\r | |
233 | # This class is used mainly to control the whole platform build for different\r | |
234 | # architecture. This class will generate top level makefile.\r | |
235 | #\r | |
236 | class WorkspaceAutoGen(AutoGen):\r | |
237 | # call super().__init__ then call the worker function with different parameter count\r | |
238 | def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r | |
239 | if not hasattr(self, "_Init"):\r | |
240 | super(WorkspaceAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r | |
241 | self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r | |
242 | self._Init = True\r | |
243 | \r | |
244 | ## Initialize WorkspaceAutoGen\r | |
245 | #\r | |
246 | # @param WorkspaceDir Root directory of workspace\r | |
247 | # @param ActivePlatform Meta-file of active platform\r | |
248 | # @param Target Build target\r | |
249 | # @param Toolchain Tool chain name\r | |
250 | # @param ArchList List of architecture of current build\r | |
251 | # @param MetaFileDb Database containing meta-files\r | |
252 | # @param BuildConfig Configuration of build\r | |
253 | # @param ToolDefinition Tool chain definitions\r | |
254 | # @param FlashDefinitionFile File of flash definition\r | |
255 | # @param Fds FD list to be generated\r | |
256 | # @param Fvs FV list to be generated\r | |
257 | # @param Caps Capsule list to be generated\r | |
258 | # @param SkuId SKU id from command line\r | |
259 | #\r | |
260 | def _InitWorker(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,\r | |
261 | BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None, Fvs=None, Caps=None, SkuId='', UniFlag=None,\r | |
262 | Progress=None, BuildModule=None):\r | |
263 | self.BuildDatabase = MetaFileDb\r | |
264 | self.MetaFile = ActivePlatform\r | |
265 | self.WorkspaceDir = WorkspaceDir\r | |
266 | self.Platform = self.BuildDatabase[self.MetaFile, TAB_ARCH_COMMON, Target, Toolchain]\r | |
267 | GlobalData.gActivePlatform = self.Platform\r | |
268 | self.BuildTarget = Target\r | |
269 | self.ToolChain = Toolchain\r | |
270 | self.ArchList = ArchList\r | |
271 | self.SkuId = SkuId\r | |
272 | self.UniFlag = UniFlag\r | |
273 | \r | |
274 | self.TargetTxt = BuildConfig\r | |
275 | self.ToolDef = ToolDefinition\r | |
276 | self.FdfFile = FlashDefinitionFile\r | |
277 | self.FdTargetList = Fds if Fds else []\r | |
278 | self.FvTargetList = Fvs if Fvs else []\r | |
279 | self.CapTargetList = Caps if Caps else []\r | |
280 | self.AutoGenObjectList = []\r | |
281 | self._GuidDict = {}\r | |
282 | \r | |
283 | # there's many relative directory operations, so ...\r | |
284 | os.chdir(self.WorkspaceDir)\r | |
285 | \r | |
286 | #\r | |
287 | # Merge Arch\r | |
288 | #\r | |
289 | if not self.ArchList:\r | |
290 | ArchList = set(self.Platform.SupArchList)\r | |
291 | else:\r | |
292 | ArchList = set(self.ArchList) & set(self.Platform.SupArchList)\r | |
293 | if not ArchList:\r | |
294 | EdkLogger.error("build", PARAMETER_INVALID,\r | |
295 | ExtraData = "Invalid ARCH specified. [Valid ARCH: %s]" % (" ".join(self.Platform.SupArchList)))\r | |
296 | elif self.ArchList and len(ArchList) != len(self.ArchList):\r | |
297 | SkippedArchList = set(self.ArchList).symmetric_difference(set(self.Platform.SupArchList))\r | |
298 | EdkLogger.verbose("\nArch [%s] is ignored because the platform supports [%s] only!"\r | |
299 | % (" ".join(SkippedArchList), " ".join(self.Platform.SupArchList)))\r | |
300 | self.ArchList = tuple(ArchList)\r | |
301 | \r | |
302 | # Validate build target\r | |
303 | if self.BuildTarget not in self.Platform.BuildTargets:\r | |
304 | EdkLogger.error("build", PARAMETER_INVALID,\r | |
305 | ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"\r | |
306 | % (self.BuildTarget, " ".join(self.Platform.BuildTargets)))\r | |
307 | \r | |
308 | \r | |
309 | # parse FDF file to get PCDs in it, if any\r | |
310 | if not self.FdfFile:\r | |
311 | self.FdfFile = self.Platform.FlashDefinition\r | |
312 | \r | |
313 | EdkLogger.info("")\r | |
314 | if self.ArchList:\r | |
315 | EdkLogger.info('%-16s = %s' % ("Architecture(s)", ' '.join(self.ArchList)))\r | |
316 | EdkLogger.info('%-16s = %s' % ("Build target", self.BuildTarget))\r | |
317 | EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))\r | |
318 | \r | |
319 | EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.Platform))\r | |
320 | if BuildModule:\r | |
321 | EdkLogger.info('%-24s = %s' % ("Active Module", BuildModule))\r | |
322 | \r | |
323 | if self.FdfFile:\r | |
324 | EdkLogger.info('%-24s = %s' % ("Flash Image Definition", self.FdfFile))\r | |
325 | \r | |
326 | EdkLogger.verbose("\nFLASH_DEFINITION = %s" % self.FdfFile)\r | |
327 | \r | |
328 | if Progress:\r | |
329 | Progress.Start("\nProcessing meta-data")\r | |
330 | \r | |
331 | if self.FdfFile:\r | |
332 | #\r | |
333 | # Mark now build in AutoGen Phase\r | |
334 | #\r | |
335 | GlobalData.gAutoGenPhase = True\r | |
336 | Fdf = FdfParser(self.FdfFile.Path)\r | |
337 | Fdf.ParseFile()\r | |
338 | GlobalData.gFdfParser = Fdf\r | |
339 | GlobalData.gAutoGenPhase = False\r | |
340 | PcdSet = Fdf.Profile.PcdDict\r | |
341 | if Fdf.CurrentFdName and Fdf.CurrentFdName in Fdf.Profile.FdDict:\r | |
342 | FdDict = Fdf.Profile.FdDict[Fdf.CurrentFdName]\r | |
343 | for FdRegion in FdDict.RegionList:\r | |
344 | if str(FdRegion.RegionType) is 'FILE' and self.Platform.VpdToolGuid in str(FdRegion.RegionDataList):\r | |
345 | if int(FdRegion.Offset) % 8 != 0:\r | |
346 | EdkLogger.error("build", FORMAT_INVALID, 'The VPD Base Address %s must be 8-byte aligned.' % (FdRegion.Offset))\r | |
347 | ModuleList = Fdf.Profile.InfList\r | |
348 | self.FdfProfile = Fdf.Profile\r | |
349 | for fvname in self.FvTargetList:\r | |
350 | if fvname.upper() not in self.FdfProfile.FvDict:\r | |
351 | EdkLogger.error("build", OPTION_VALUE_INVALID,\r | |
352 | "No such an FV in FDF file: %s" % fvname)\r | |
353 | \r | |
354 | # In DSC file may use FILE_GUID to override the module, then in the Platform.Modules use FILE_GUIDmodule.inf as key,\r | |
355 | # but the path (self.MetaFile.Path) is the real path\r | |
356 | for key in self.FdfProfile.InfDict:\r | |
357 | if key == 'ArchTBD':\r | |
358 | MetaFile_cache = defaultdict(set)\r | |
359 | for Arch in self.ArchList:\r | |
360 | Current_Platform_cache = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]\r | |
361 | for Pkey in Current_Platform_cache.Modules:\r | |
362 | MetaFile_cache[Arch].add(Current_Platform_cache.Modules[Pkey].MetaFile)\r | |
363 | for Inf in self.FdfProfile.InfDict[key]:\r | |
364 | ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)\r | |
365 | for Arch in self.ArchList:\r | |
366 | if ModuleFile in MetaFile_cache[Arch]:\r | |
367 | break\r | |
368 | else:\r | |
369 | ModuleData = self.BuildDatabase[ModuleFile, Arch, Target, Toolchain]\r | |
370 | if not ModuleData.IsBinaryModule:\r | |
371 | EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)\r | |
372 | \r | |
373 | else:\r | |
374 | for Arch in self.ArchList:\r | |
375 | if Arch == key:\r | |
376 | Platform = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]\r | |
377 | MetaFileList = set()\r | |
378 | for Pkey in Platform.Modules:\r | |
379 | MetaFileList.add(Platform.Modules[Pkey].MetaFile)\r | |
380 | for Inf in self.FdfProfile.InfDict[key]:\r | |
381 | ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)\r | |
382 | if ModuleFile in MetaFileList:\r | |
383 | continue\r | |
384 | ModuleData = self.BuildDatabase[ModuleFile, Arch, Target, Toolchain]\r | |
385 | if not ModuleData.IsBinaryModule:\r | |
386 | EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)\r | |
387 | \r | |
388 | else:\r | |
389 | PcdSet = {}\r | |
390 | ModuleList = []\r | |
391 | self.FdfProfile = None\r | |
392 | if self.FdTargetList:\r | |
393 | EdkLogger.info("No flash definition file found. FD [%s] will be ignored." % " ".join(self.FdTargetList))\r | |
394 | self.FdTargetList = []\r | |
395 | if self.FvTargetList:\r | |
396 | EdkLogger.info("No flash definition file found. FV [%s] will be ignored." % " ".join(self.FvTargetList))\r | |
397 | self.FvTargetList = []\r | |
398 | if self.CapTargetList:\r | |
399 | EdkLogger.info("No flash definition file found. Capsule [%s] will be ignored." % " ".join(self.CapTargetList))\r | |
400 | self.CapTargetList = []\r | |
401 | \r | |
402 | # apply SKU and inject PCDs from Flash Definition file\r | |
403 | for Arch in self.ArchList:\r | |
404 | Platform = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]\r | |
405 | PlatformPcds = Platform.Pcds\r | |
406 | self._GuidDict = Platform._GuidDict\r | |
407 | SourcePcdDict = {TAB_PCDS_DYNAMIC_EX:set(), TAB_PCDS_PATCHABLE_IN_MODULE:set(),TAB_PCDS_DYNAMIC:set(),TAB_PCDS_FIXED_AT_BUILD:set()}\r | |
408 | BinaryPcdDict = {TAB_PCDS_DYNAMIC_EX:set(), TAB_PCDS_PATCHABLE_IN_MODULE:set()}\r | |
409 | SourcePcdDict_Keys = SourcePcdDict.keys()\r | |
410 | BinaryPcdDict_Keys = BinaryPcdDict.keys()\r | |
411 | \r | |
412 | # generate the SourcePcdDict and BinaryPcdDict\r | |
413 | PGen = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)\r | |
414 | for BuildData in PGen.BuildDatabase._CACHE_.values():\r | |
415 | if BuildData.Arch != Arch:\r | |
416 | continue\r | |
417 | if BuildData.MetaFile.Ext == '.inf':\r | |
418 | for key in BuildData.Pcds:\r | |
419 | if BuildData.Pcds[key].Pending:\r | |
420 | if key in Platform.Pcds:\r | |
421 | PcdInPlatform = Platform.Pcds[key]\r | |
422 | if PcdInPlatform.Type:\r | |
423 | BuildData.Pcds[key].Type = PcdInPlatform.Type\r | |
424 | BuildData.Pcds[key].Pending = False\r | |
425 | \r | |
426 | if BuildData.MetaFile in Platform.Modules:\r | |
427 | PlatformModule = Platform.Modules[str(BuildData.MetaFile)]\r | |
428 | if key in PlatformModule.Pcds:\r | |
429 | PcdInPlatform = PlatformModule.Pcds[key]\r | |
430 | if PcdInPlatform.Type:\r | |
431 | BuildData.Pcds[key].Type = PcdInPlatform.Type\r | |
432 | BuildData.Pcds[key].Pending = False\r | |
433 | else:\r | |
434 | #Pcd used in Library, Pcd Type from reference module if Pcd Type is Pending\r | |
435 | if BuildData.Pcds[key].Pending:\r | |
436 | MGen = ModuleAutoGen(self, BuildData.MetaFile, Target, Toolchain, Arch, self.MetaFile)\r | |
437 | if MGen and MGen.IsLibrary:\r | |
438 | if MGen in PGen.LibraryAutoGenList:\r | |
439 | ReferenceModules = MGen.ReferenceModules\r | |
440 | for ReferenceModule in ReferenceModules:\r | |
441 | if ReferenceModule.MetaFile in Platform.Modules:\r | |
442 | RefPlatformModule = Platform.Modules[str(ReferenceModule.MetaFile)]\r | |
443 | if key in RefPlatformModule.Pcds:\r | |
444 | PcdInReferenceModule = RefPlatformModule.Pcds[key]\r | |
445 | if PcdInReferenceModule.Type:\r | |
446 | BuildData.Pcds[key].Type = PcdInReferenceModule.Type\r | |
447 | BuildData.Pcds[key].Pending = False\r | |
448 | break\r | |
449 | \r | |
450 | if TAB_PCDS_DYNAMIC_EX in BuildData.Pcds[key].Type:\r | |
451 | if BuildData.IsBinaryModule:\r | |
452 | BinaryPcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r | |
453 | else:\r | |
454 | SourcePcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r | |
455 | \r | |
456 | elif TAB_PCDS_PATCHABLE_IN_MODULE in BuildData.Pcds[key].Type:\r | |
457 | if BuildData.MetaFile.Ext == '.inf':\r | |
458 | if BuildData.IsBinaryModule:\r | |
459 | BinaryPcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r | |
460 | else:\r | |
461 | SourcePcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r | |
462 | \r | |
463 | elif TAB_PCDS_DYNAMIC in BuildData.Pcds[key].Type:\r | |
464 | SourcePcdDict[TAB_PCDS_DYNAMIC].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r | |
465 | elif TAB_PCDS_FIXED_AT_BUILD in BuildData.Pcds[key].Type:\r | |
466 | SourcePcdDict[TAB_PCDS_FIXED_AT_BUILD].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r | |
467 | else:\r | |
468 | pass\r | |
469 | #\r | |
470 | # A PCD can only use one type for all source modules\r | |
471 | #\r | |
472 | for i in SourcePcdDict_Keys:\r | |
473 | for j in SourcePcdDict_Keys:\r | |
474 | if i != j:\r | |
475 | Intersections = SourcePcdDict[i].intersection(SourcePcdDict[j])\r | |
476 | if len(Intersections) > 0:\r | |
477 | EdkLogger.error(\r | |
478 | 'build',\r | |
479 | FORMAT_INVALID,\r | |
480 | "Building modules from source INFs, following PCD use %s and %s access method. It must be corrected to use only one access method." % (i, j),\r | |
481 | ExtraData='\n\t'.join(str(P[1]+'.'+P[0]) for P in Intersections)\r | |
482 | )\r | |
483 | \r | |
484 | #\r | |
485 | # intersection the BinaryPCD for Mixed PCD\r | |
486 | #\r | |
487 | for i in BinaryPcdDict_Keys:\r | |
488 | for j in BinaryPcdDict_Keys:\r | |
489 | if i != j:\r | |
490 | Intersections = BinaryPcdDict[i].intersection(BinaryPcdDict[j])\r | |
491 | for item in Intersections:\r | |
492 | NewPcd1 = (item[0] + '_' + i, item[1])\r | |
493 | NewPcd2 = (item[0] + '_' + j, item[1])\r | |
494 | if item not in GlobalData.MixedPcd:\r | |
495 | GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]\r | |
496 | else:\r | |
497 | if NewPcd1 not in GlobalData.MixedPcd[item]:\r | |
498 | GlobalData.MixedPcd[item].append(NewPcd1)\r | |
499 | if NewPcd2 not in GlobalData.MixedPcd[item]:\r | |
500 | GlobalData.MixedPcd[item].append(NewPcd2)\r | |
501 | \r | |
502 | #\r | |
503 | # intersection the SourcePCD and BinaryPCD for Mixed PCD\r | |
504 | #\r | |
505 | for i in SourcePcdDict_Keys:\r | |
506 | for j in BinaryPcdDict_Keys:\r | |
507 | if i != j:\r | |
508 | Intersections = SourcePcdDict[i].intersection(BinaryPcdDict[j])\r | |
509 | for item in Intersections:\r | |
510 | NewPcd1 = (item[0] + '_' + i, item[1])\r | |
511 | NewPcd2 = (item[0] + '_' + j, item[1])\r | |
512 | if item not in GlobalData.MixedPcd:\r | |
513 | GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]\r | |
514 | else:\r | |
515 | if NewPcd1 not in GlobalData.MixedPcd[item]:\r | |
516 | GlobalData.MixedPcd[item].append(NewPcd1)\r | |
517 | if NewPcd2 not in GlobalData.MixedPcd[item]:\r | |
518 | GlobalData.MixedPcd[item].append(NewPcd2)\r | |
519 | \r | |
520 | for BuildData in PGen.BuildDatabase._CACHE_.values():\r | |
521 | if BuildData.Arch != Arch:\r | |
522 | continue\r | |
523 | for key in BuildData.Pcds:\r | |
524 | for SinglePcd in GlobalData.MixedPcd:\r | |
525 | if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) == SinglePcd:\r | |
526 | for item in GlobalData.MixedPcd[SinglePcd]:\r | |
527 | Pcd_Type = item[0].split('_')[-1]\r | |
528 | if (Pcd_Type == BuildData.Pcds[key].Type) or (Pcd_Type == TAB_PCDS_DYNAMIC_EX and BuildData.Pcds[key].Type in PCD_DYNAMIC_EX_TYPE_SET) or \\r | |
529 | (Pcd_Type == TAB_PCDS_DYNAMIC and BuildData.Pcds[key].Type in PCD_DYNAMIC_TYPE_SET):\r | |
530 | Value = BuildData.Pcds[key]\r | |
531 | Value.TokenCName = BuildData.Pcds[key].TokenCName + '_' + Pcd_Type\r | |
532 | if len(key) == 2:\r | |
533 | newkey = (Value.TokenCName, key[1])\r | |
534 | elif len(key) == 3:\r | |
535 | newkey = (Value.TokenCName, key[1], key[2])\r | |
536 | del BuildData.Pcds[key]\r | |
537 | BuildData.Pcds[newkey] = Value\r | |
538 | break\r | |
539 | break\r | |
540 | \r | |
541 | # handle the mixed pcd in FDF file\r | |
542 | for key in PcdSet:\r | |
543 | if key in GlobalData.MixedPcd:\r | |
544 | Value = PcdSet[key]\r | |
545 | del PcdSet[key]\r | |
546 | for item in GlobalData.MixedPcd[key]:\r | |
547 | PcdSet[item] = Value\r | |
548 | \r | |
549 | #Collect package set information from INF of FDF\r | |
550 | PkgSet = set()\r | |
551 | for Inf in ModuleList:\r | |
552 | ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)\r | |
553 | if ModuleFile in Platform.Modules:\r | |
554 | continue\r | |
555 | ModuleData = self.BuildDatabase[ModuleFile, Arch, Target, Toolchain]\r | |
556 | PkgSet.update(ModuleData.Packages)\r | |
557 | Pkgs = list(PkgSet) + list(PGen.PackageList)\r | |
558 | DecPcds = set()\r | |
559 | DecPcdsKey = set()\r | |
560 | for Pkg in Pkgs:\r | |
561 | for Pcd in Pkg.Pcds:\r | |
562 | DecPcds.add((Pcd[0], Pcd[1]))\r | |
563 | DecPcdsKey.add((Pcd[0], Pcd[1], Pcd[2]))\r | |
564 | \r | |
565 | Platform.SkuName = self.SkuId\r | |
566 | for Name, Guid,Fileds in PcdSet:\r | |
567 | if (Name, Guid) not in DecPcds:\r | |
568 | EdkLogger.error(\r | |
569 | 'build',\r | |
570 | PARSER_ERROR,\r | |
571 | "PCD (%s.%s) used in FDF is not declared in DEC files." % (Guid, Name),\r | |
572 | File = self.FdfProfile.PcdFileLineDict[Name, Guid][0],\r | |
573 | Line = self.FdfProfile.PcdFileLineDict[Name, Guid][1]\r | |
574 | )\r | |
575 | else:\r | |
576 | # Check whether Dynamic or DynamicEx PCD used in FDF file. If used, build break and give a error message.\r | |
577 | if (Name, Guid, TAB_PCDS_FIXED_AT_BUILD) in DecPcdsKey \\r | |
578 | or (Name, Guid, TAB_PCDS_PATCHABLE_IN_MODULE) in DecPcdsKey \\r | |
579 | or (Name, Guid, TAB_PCDS_FEATURE_FLAG) in DecPcdsKey:\r | |
580 | continue\r | |
581 | elif (Name, Guid, TAB_PCDS_DYNAMIC) in DecPcdsKey or (Name, Guid, TAB_PCDS_DYNAMIC_EX) in DecPcdsKey:\r | |
582 | EdkLogger.error(\r | |
583 | 'build',\r | |
584 | PARSER_ERROR,\r | |
585 | "Using Dynamic or DynamicEx type of PCD [%s.%s] in FDF file is not allowed." % (Guid, Name),\r | |
586 | File = self.FdfProfile.PcdFileLineDict[Name, Guid][0],\r | |
587 | Line = self.FdfProfile.PcdFileLineDict[Name, Guid][1]\r | |
588 | )\r | |
589 | \r | |
590 | Pa = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)\r | |
591 | #\r | |
592 | # Explicitly collect platform's dynamic PCDs\r | |
593 | #\r | |
594 | Pa.CollectPlatformDynamicPcds()\r | |
595 | Pa.CollectFixedAtBuildPcds()\r | |
596 | self.AutoGenObjectList.append(Pa)\r | |
597 | \r | |
598 | #\r | |
599 | # Generate Package level hash value\r | |
600 | #\r | |
601 | GlobalData.gPackageHash[Arch] = {}\r | |
602 | if GlobalData.gUseHashCache:\r | |
603 | for Pkg in Pkgs:\r | |
604 | self._GenPkgLevelHash(Pkg)\r | |
605 | \r | |
606 | #\r | |
607 | # Check PCDs token value conflict in each DEC file.\r | |
608 | #\r | |
609 | self._CheckAllPcdsTokenValueConflict()\r | |
610 | \r | |
611 | #\r | |
612 | # Check PCD type and definition between DSC and DEC\r | |
613 | #\r | |
614 | self._CheckPcdDefineAndType()\r | |
615 | \r | |
616 | #\r | |
617 | # Create BuildOptions Macro & PCD metafile, also add the Active Platform and FDF file.\r | |
618 | #\r | |
619 | content = 'gCommandLineDefines: '\r | |
620 | content += str(GlobalData.gCommandLineDefines)\r | |
621 | content += os.linesep\r | |
622 | content += 'BuildOptionPcd: '\r | |
623 | content += str(GlobalData.BuildOptionPcd)\r | |
624 | content += os.linesep\r | |
625 | content += 'Active Platform: '\r | |
626 | content += str(self.Platform)\r | |
627 | content += os.linesep\r | |
628 | if self.FdfFile:\r | |
629 | content += 'Flash Image Definition: '\r | |
630 | content += str(self.FdfFile)\r | |
631 | content += os.linesep\r | |
632 | SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)\r | |
633 | \r | |
634 | #\r | |
635 | # Create PcdToken Number file for Dynamic/DynamicEx Pcd.\r | |
636 | #\r | |
637 | PcdTokenNumber = 'PcdTokenNumber: '\r | |
638 | if Pa.PcdTokenNumber:\r | |
639 | if Pa.DynamicPcdList:\r | |
640 | for Pcd in Pa.DynamicPcdList:\r | |
641 | PcdTokenNumber += os.linesep\r | |
642 | PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))\r | |
643 | PcdTokenNumber += ' : '\r | |
644 | PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName])\r | |
645 | SaveFileOnChange(os.path.join(self.BuildDir, 'PcdTokenNumber'), PcdTokenNumber, False)\r | |
646 | \r | |
647 | #\r | |
648 | # Get set of workspace metafiles\r | |
649 | #\r | |
650 | AllWorkSpaceMetaFiles = self._GetMetaFiles(Target, Toolchain, Arch)\r | |
651 | \r | |
652 | #\r | |
653 | # Retrieve latest modified time of all metafiles\r | |
654 | #\r | |
655 | SrcTimeStamp = 0\r | |
656 | for f in AllWorkSpaceMetaFiles:\r | |
657 | if os.stat(f)[8] > SrcTimeStamp:\r | |
658 | SrcTimeStamp = os.stat(f)[8]\r | |
659 | self._SrcTimeStamp = SrcTimeStamp\r | |
660 | \r | |
661 | if GlobalData.gUseHashCache:\r | |
662 | m = hashlib.md5()\r | |
663 | for files in AllWorkSpaceMetaFiles:\r | |
664 | if files.endswith('.dec'):\r | |
665 | continue\r | |
666 | f = open(files, 'r')\r | |
667 | Content = f.read()\r | |
668 | f.close()\r | |
669 | m.update(Content)\r | |
670 | SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), True)\r | |
671 | GlobalData.gPlatformHash = m.hexdigest()\r | |
672 | \r | |
673 | #\r | |
674 | # Write metafile list to build directory\r | |
675 | #\r | |
676 | AutoGenFilePath = os.path.join(self.BuildDir, 'AutoGen')\r | |
677 | if os.path.exists (AutoGenFilePath):\r | |
678 | os.remove(AutoGenFilePath)\r | |
679 | if not os.path.exists(self.BuildDir):\r | |
680 | os.makedirs(self.BuildDir)\r | |
681 | with open(os.path.join(self.BuildDir, 'AutoGen'), 'w+') as file:\r | |
682 | for f in AllWorkSpaceMetaFiles:\r | |
683 | print(f, file=file)\r | |
684 | return True\r | |
685 | \r | |
686 | def _GenPkgLevelHash(self, Pkg):\r | |
687 | if Pkg.PackageName in GlobalData.gPackageHash[Pkg.Arch]:\r | |
688 | return\r | |
689 | \r | |
690 | PkgDir = os.path.join(self.BuildDir, Pkg.Arch, Pkg.PackageName)\r | |
691 | CreateDirectory(PkgDir)\r | |
692 | HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')\r | |
693 | m = hashlib.md5()\r | |
694 | # Get .dec file's hash value\r | |
695 | f = open(Pkg.MetaFile.Path, 'r')\r | |
696 | Content = f.read()\r | |
697 | f.close()\r | |
698 | m.update(Content)\r | |
699 | # Get include files hash value\r | |
700 | if Pkg.Includes:\r | |
701 | for inc in sorted(Pkg.Includes, key=lambda x: str(x)):\r | |
702 | for Root, Dirs, Files in os.walk(str(inc)):\r | |
703 | for File in sorted(Files):\r | |
704 | File_Path = os.path.join(Root, File)\r | |
705 | f = open(File_Path, 'r')\r | |
706 | Content = f.read()\r | |
707 | f.close()\r | |
708 | m.update(Content)\r | |
709 | SaveFileOnChange(HashFile, m.hexdigest(), True)\r | |
710 | GlobalData.gPackageHash[Pkg.Arch][Pkg.PackageName] = m.hexdigest()\r | |
711 | \r | |
712 | def _GetMetaFiles(self, Target, Toolchain, Arch):\r | |
713 | AllWorkSpaceMetaFiles = set()\r | |
714 | #\r | |
715 | # add fdf\r | |
716 | #\r | |
717 | if self.FdfFile:\r | |
718 | AllWorkSpaceMetaFiles.add (self.FdfFile.Path)\r | |
719 | for f in GlobalData.gFdfParser.GetAllIncludedFile():\r | |
720 | AllWorkSpaceMetaFiles.add (f.FileName)\r | |
721 | #\r | |
722 | # add dsc\r | |
723 | #\r | |
724 | AllWorkSpaceMetaFiles.add(self.MetaFile.Path)\r | |
725 | \r | |
726 | #\r | |
727 | # add build_rule.txt & tools_def.txt\r | |
728 | #\r | |
729 | AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultBuildRuleFile))\r | |
730 | AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultToolsDefFile))\r | |
731 | \r | |
732 | # add BuildOption metafile\r | |
733 | #\r | |
734 | AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'BuildOptions'))\r | |
735 | \r | |
736 | # add PcdToken Number file for Dynamic/DynamicEx Pcd\r | |
737 | #\r | |
738 | AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'PcdTokenNumber'))\r | |
739 | \r | |
740 | for Arch in self.ArchList:\r | |
741 | #\r | |
742 | # add dec\r | |
743 | #\r | |
744 | for Package in PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch).PackageList:\r | |
745 | AllWorkSpaceMetaFiles.add(Package.MetaFile.Path)\r | |
746 | \r | |
747 | #\r | |
748 | # add included dsc\r | |
749 | #\r | |
750 | for filePath in self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]._RawData.IncludedFiles:\r | |
751 | AllWorkSpaceMetaFiles.add(filePath.Path)\r | |
752 | \r | |
753 | return AllWorkSpaceMetaFiles\r | |
754 | \r | |
    def _CheckPcdDefineAndType(self):
        ## Cross-check DSC PCD declarations against package DEC declarations.
        #
        #  For every PCD set in the DSC, verify that some package DEC declares
        #  it with a compatible type.  A DEC declaration with a *different*
        #  type is a fatal build error; a PCD that no package declares at all
        #  for a given arch only produces a warning.
        PcdTypeSet = {TAB_PCDS_FIXED_AT_BUILD,
                      TAB_PCDS_PATCHABLE_IN_MODULE,
                      TAB_PCDS_FEATURE_FLAG,
                      TAB_PCDS_DYNAMIC,
                      TAB_PCDS_DYNAMIC_EX}

        # This dict stores PCDs which are not used by any modules with specified arches
        UnusedPcd = OrderedDict()
        for Pa in self.AutoGenObjectList:
            # Key of DSC's Pcds dictionary is (PcdCName, TokenSpaceGuid)
            for Pcd in Pa.Platform.Pcds:
                PcdType = Pa.Platform.Pcds[Pcd].Type

                # If no PCD type, this PCD comes from FDF
                if not PcdType:
                    continue

                # Strip the Hii/Vpd suffix so DynamicHii/DynamicVpd compare
                # equal to the plain Dynamic(!Ex) type used by DEC files
                if PcdType.startswith(TAB_PCDS_DYNAMIC_EX):
                    PcdType = TAB_PCDS_DYNAMIC_EX
                elif PcdType.startswith(TAB_PCDS_DYNAMIC):
                    PcdType = TAB_PCDS_DYNAMIC

                for Package in Pa.PackageList:
                    # Key of DEC's Pcds dictionary is (PcdCName, TokenSpaceGuid, PcdType).
                    # Exact type match: PCD is fine; break also skips the
                    # for-else "unused" bookkeeping below.
                    if (Pcd[0], Pcd[1], PcdType) in Package.Pcds:
                        break
                    # Declared in this DEC but under a different type:
                    # fatal DSC/DEC mismatch -- report and stop checking.
                    for Type in PcdTypeSet:
                        if (Pcd[0], Pcd[1], Type) in Package.Pcds:
                            EdkLogger.error(
                                'build',
                                FORMAT_INVALID,
                                "Type [%s] of PCD [%s.%s] in DSC file doesn't match the type [%s] defined in DEC file." \
                                % (Pa.Platform.Pcds[Pcd].Type, Pcd[1], Pcd[0], Type),
                                ExtraData=None
                            )
                            return
                else:
                    # for-else: the Package loop finished without break, so no
                    # package declares this PCD; remember it per arch.
                    UnusedPcd.setdefault(Pcd, []).append(Pa.Arch)

        for Pcd in UnusedPcd:
            EdkLogger.warn(
                'build',
                "The PCD was not specified by any INF module in the platform for the given architecture.\n"
                "\tPCD: [%s.%s]\n\tPlatform: [%s]\n\tArch: %s"
                % (Pcd[1], Pcd[0], os.path.basename(str(self.MetaFile)), str(UnusedPcd[Pcd])),
                ExtraData=None
            )
804 | \r | |
805 | def __repr__(self):\r | |
806 | return "%s [%s]" % (self.MetaFile, ", ".join(self.ArchList))\r | |
807 | \r | |
    ## Return the directory to store FV files
    @cached_property
    def FvDir(self):
        # FV output is placed in the TAB_FV_DIRECTORY subdirectory of the
        # platform build directory.
        return path.join(self.BuildDir, TAB_FV_DIRECTORY)
812 | \r | |
    ## Return the directory to store all intermediate and final files built
    @cached_property
    def BuildDir(self):
        # Delegates to the first PlatformAutoGen object; presumed identical
        # for every arch (mirrors the BuildCommand handling in this class).
        return self.AutoGenObjectList[0].BuildDir
817 | \r | |
    ## Return the build output directory the platform specifies
    @cached_property
    def OutputDir(self):
        return self.Platform.OutputDirectory
822 | \r | |
    ## Return platform name
    @cached_property
    def Name(self):
        return self.Platform.PlatformName
827 | \r | |
    ## Return meta-file GUID
    @cached_property
    def Guid(self):
        return self.Platform.Guid
832 | \r | |
    ## Return platform version
    @cached_property
    def Version(self):
        return self.Platform.Version
837 | \r | |
    ## Return paths of tools
    @cached_property
    def ToolDefinition(self):
        # Delegates to the first PlatformAutoGen object; presumed identical
        # for every arch (mirrors the BuildCommand handling in this class).
        return self.AutoGenObjectList[0].ToolDefinition
842 | \r | |
    ## Return directory of platform makefile
    #
    #   @retval     string  Makefile directory
    #
    @cached_property
    def MakeFileDir(self):
        # The top-level makefile lives directly in the platform build directory.
        return self.BuildDir
850 | \r | |
    ## Return build command string
    #
    #   @retval     string  Build command string
    #
    @cached_property
    def BuildCommand(self):
        # BuildCommand is expected to be identical for every arch-specific
        # PlatformAutoGen object, so taking the first is sufficient.
        return self.AutoGenObjectList[0].BuildCommand
859 | \r | |
860 | ## Check the PCDs token value conflict in each DEC file.\r | |
861 | #\r | |
862 | # Will cause build break and raise error message while two PCDs conflict.\r | |
863 | #\r | |
864 | # @return None\r | |
865 | #\r | |
866 | def _CheckAllPcdsTokenValueConflict(self):\r | |
867 | for Pa in self.AutoGenObjectList:\r | |
868 | for Package in Pa.PackageList:\r | |
869 | PcdList = Package.Pcds.values()\r | |
870 | PcdList.sort(key=lambda x: int(x.TokenValue, 0))\r | |
871 | Count = 0\r | |
872 | while (Count < len(PcdList) - 1) :\r | |
873 | Item = PcdList[Count]\r | |
874 | ItemNext = PcdList[Count + 1]\r | |
875 | #\r | |
876 | # Make sure in the same token space the TokenValue should be unique\r | |
877 | #\r | |
878 | if (int(Item.TokenValue, 0) == int(ItemNext.TokenValue, 0)):\r | |
879 | SameTokenValuePcdList = []\r | |
880 | SameTokenValuePcdList.append(Item)\r | |
881 | SameTokenValuePcdList.append(ItemNext)\r | |
882 | RemainPcdListLength = len(PcdList) - Count - 2\r | |
883 | for ValueSameCount in range(RemainPcdListLength):\r | |
884 | if int(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount].TokenValue, 0) == int(Item.TokenValue, 0):\r | |
885 | SameTokenValuePcdList.append(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount])\r | |
886 | else:\r | |
887 | break;\r | |
888 | #\r | |
889 | # Sort same token value PCD list with TokenGuid and TokenCName\r | |
890 | #\r | |
891 | SameTokenValuePcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))\r | |
892 | SameTokenValuePcdListCount = 0\r | |
893 | while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):\r | |
894 | Flag = False\r | |
895 | TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]\r | |
896 | TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1]\r | |
897 | \r | |
898 | if (TemListItem.TokenSpaceGuidCName == TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName != TemListItemNext.TokenCName):\r | |
899 | for PcdItem in GlobalData.MixedPcd:\r | |
900 | if (TemListItem.TokenCName, TemListItem.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem] or \\r | |
901 | (TemListItemNext.TokenCName, TemListItemNext.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:\r | |
902 | Flag = True\r | |
903 | if not Flag:\r | |
904 | EdkLogger.error(\r | |
905 | 'build',\r | |
906 | FORMAT_INVALID,\r | |
907 | "The TokenValue [%s] of PCD [%s.%s] is conflict with: [%s.%s] in %s"\\r | |
908 | % (TemListItem.TokenValue, TemListItem.TokenSpaceGuidCName, TemListItem.TokenCName, TemListItemNext.TokenSpaceGuidCName, TemListItemNext.TokenCName, Package),\r | |
909 | ExtraData=None\r | |
910 | )\r | |
911 | SameTokenValuePcdListCount += 1\r | |
912 | Count += SameTokenValuePcdListCount\r | |
913 | Count += 1\r | |
914 | \r | |
915 | PcdList = Package.Pcds.values()\r | |
916 | PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))\r | |
917 | Count = 0\r | |
918 | while (Count < len(PcdList) - 1) :\r | |
919 | Item = PcdList[Count]\r | |
920 | ItemNext = PcdList[Count + 1]\r | |
921 | #\r | |
922 | # Check PCDs with same TokenSpaceGuidCName.TokenCName have same token value as well.\r | |
923 | #\r | |
924 | if (Item.TokenSpaceGuidCName == ItemNext.TokenSpaceGuidCName) and (Item.TokenCName == ItemNext.TokenCName) and (int(Item.TokenValue, 0) != int(ItemNext.TokenValue, 0)):\r | |
925 | EdkLogger.error(\r | |
926 | 'build',\r | |
927 | FORMAT_INVALID,\r | |
928 | "The TokenValue [%s] of PCD [%s.%s] in %s defined in two places should be same as well."\\r | |
929 | % (Item.TokenValue, Item.TokenSpaceGuidCName, Item.TokenCName, Package),\r | |
930 | ExtraData=None\r | |
931 | )\r | |
932 | Count += 1\r | |
933 | ## Generate fds command\r | |
934 | @property\r | |
935 | def GenFdsCommand(self):\r | |
936 | return (GenMake.TopLevelMakefile(self)._TEMPLATE_.Replace(GenMake.TopLevelMakefile(self)._TemplateDict)).strip()\r | |
937 | \r | |
938 | ## Create makefile for the platform and modules in it\r | |
939 | #\r | |
940 | # @param CreateDepsMakeFile Flag indicating if the makefile for\r | |
941 | # modules will be created as well\r | |
942 | #\r | |
943 | def CreateMakeFile(self, CreateDepsMakeFile=False):\r | |
944 | if not CreateDepsMakeFile:\r | |
945 | return\r | |
946 | for Pa in self.AutoGenObjectList:\r | |
947 | Pa.CreateMakeFile(True)\r | |
948 | \r | |
949 | ## Create autogen code for platform and modules\r | |
950 | #\r | |
951 | # Since there's no autogen code for platform, this method will do nothing\r | |
952 | # if CreateModuleCodeFile is set to False.\r | |
953 | #\r | |
954 | # @param CreateDepsCodeFile Flag indicating if creating module's\r | |
955 | # autogen code file or not\r | |
956 | #\r | |
957 | def CreateCodeFile(self, CreateDepsCodeFile=False):\r | |
958 | if not CreateDepsCodeFile:\r | |
959 | return\r | |
960 | for Pa in self.AutoGenObjectList:\r | |
961 | Pa.CreateCodeFile(True)\r | |
962 | \r | |
    ## Create AsBuilt INF file for the platform
    #
    #  Intentionally a no-op: there is no workspace-level AsBuilt INF
    #  (presumably handled per module -- confirm against ModuleAutoGen);
    #  the stub keeps the AutoGen interface uniform.
    def CreateAsBuiltInf(self):
        return
967 | \r | |
968 | \r | |
969 | ## AutoGen class for platform\r | |
970 | #\r | |
971 | # PlatformAutoGen class will process the original information in platform\r | |
972 | # file in order to generate makefile for platform.\r | |
973 | #\r | |
974 | class PlatformAutoGen(AutoGen):\r | |
    # call super().__init__ then call the worker function with different parameter count
    def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # Guard against re-initialization: _Init is only set after the first
        # successful construction, so a reused/cached instance skips this body.
        if not hasattr(self, "_Init"):
            # NOTE(review): 'self' is passed explicitly in addition to the
            # implicit binding of super().__init__, so it lands in the base
            # initializer's first positional parameter -- verify this matches
            # AutoGen.__init__'s signature (defined outside this view).
            super(PlatformAutoGen, self).__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
            self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
            self._Init = True
981 | #\r | |
982 | # Used to store all PCDs for both PEI and DXE phase, in order to generate\r | |
983 | # correct PCD database\r | |
984 | #\r | |
985 | _DynaPcdList_ = []\r | |
986 | _NonDynaPcdList_ = []\r | |
987 | _PlatformPcds = {}\r | |
988 | \r | |
989 | #\r | |
990 | # The priority list while override build option\r | |
991 | #\r | |
992 | PrioList = {"0x11111" : 16, # TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)\r | |
993 | "0x01111" : 15, # ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE\r | |
994 | "0x10111" : 14, # TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE\r | |
995 | "0x00111" : 13, # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE\r | |
996 | "0x11011" : 12, # TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE\r | |
997 | "0x01011" : 11, # ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE\r | |
998 | "0x10011" : 10, # TARGET_*********_****_COMMANDTYPE_ATTRIBUTE\r | |
999 | "0x00011" : 9, # ******_*********_****_COMMANDTYPE_ATTRIBUTE\r | |
1000 | "0x11101" : 8, # TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE\r | |
1001 | "0x01101" : 7, # ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE\r | |
1002 | "0x10101" : 6, # TARGET_*********_ARCH_***********_ATTRIBUTE\r | |
1003 | "0x00101" : 5, # ******_*********_ARCH_***********_ATTRIBUTE\r | |
1004 | "0x11001" : 4, # TARGET_TOOLCHAIN_****_***********_ATTRIBUTE\r | |
1005 | "0x01001" : 3, # ******_TOOLCHAIN_****_***********_ATTRIBUTE\r | |
1006 | "0x10001" : 2, # TARGET_*********_****_***********_ATTRIBUTE\r | |
1007 | "0x00001" : 1} # ******_*********_****_***********_ATTRIBUTE (Lowest)\r | |
1008 | \r | |
    ## Initialize PlatformAutoGen
    #
    #  One-time worker for __init__: records build parameters, binds the
    #  workspace/build database, and collects binary (AsBuild) modules from
    #  the FDF when one was parsed.
    #
    #   @param      Workspace       WorkspaceAutoGen object
    #   @param      PlatformFile    Platform file (DSC file)
    #   @param      Target          Build target (DEBUG, RELEASE)
    #   @param      Toolchain       Name of tool chain
    #   @param      Arch            arch of the platform supports
    #
    def _InitWorker(self, Workspace, PlatformFile, Target, Toolchain, Arch):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen platform [%s] [%s]" % (PlatformFile, Arch))
        # Published so error reporting elsewhere can name what was being processed
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (PlatformFile, Arch, Toolchain, Target)

        self.MetaFile = PlatformFile
        self.Workspace = Workspace
        self.WorkspaceDir = Workspace.WorkspaceDir
        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.SourceDir = PlatformFile.SubDir
        self.SourceOverrideDir = None
        self.FdTargetList = self.Workspace.FdTargetList
        self.FvTargetList = self.Workspace.FvTargetList
        self.AllPcdList = []
        # get the original module/package/platform objects
        self.BuildDatabase = Workspace.BuildDatabase
        self.DscBuildDataObj = Workspace.Platform

        # flag indicating if the makefile/C-code file has been created or not
        self.IsMakeFileCreated = False

        self._DynamicPcdList = None    # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
        self._NonDynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]

        self._AsBuildInfList = []
        self._AsBuildModuleList = []

        self.VariableInfo = None

        # Collect binary-only (AsBuild) modules listed in the FDF: only
        # modules flagged IsBinaryModule are kept.
        if GlobalData.gFdfParser is not None:
            self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList
            for Inf in self._AsBuildInfList:
                InfClass = PathClass(NormPath(Inf), GlobalData.gWorkspace, self.Arch)
                M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
                if not M.IsBinaryModule:
                    continue
                self._AsBuildModuleList.append(InfClass)
        # get library/modules for build
        self.LibraryBuildDirectoryList = []
        self.ModuleBuildDirectoryList = []

        return True
1061 | \r | |
1062 | @cached_class_function\r | |
1063 | def __repr__(self):\r | |
1064 | return "%s [%s]" % (self.MetaFile, self.Arch)\r | |
1065 | \r | |
1066 | ## Create autogen code for platform and modules\r | |
1067 | #\r | |
1068 | # Since there's no autogen code for platform, this method will do nothing\r | |
1069 | # if CreateModuleCodeFile is set to False.\r | |
1070 | #\r | |
1071 | # @param CreateModuleCodeFile Flag indicating if creating module's\r | |
1072 | # autogen code file or not\r | |
1073 | #\r | |
1074 | @cached_class_function\r | |
1075 | def CreateCodeFile(self, CreateModuleCodeFile=False):\r | |
1076 | # only module has code to be greated, so do nothing if CreateModuleCodeFile is False\r | |
1077 | if not CreateModuleCodeFile:\r | |
1078 | return\r | |
1079 | \r | |
1080 | for Ma in self.ModuleAutoGenList:\r | |
1081 | Ma.CreateCodeFile(True)\r | |
1082 | \r | |
    ## Generate Fds Command
    @cached_property
    def GenFdsCommand(self):
        # Delegates to the workspace-level GenFds command.
        return self.Workspace.GenFdsCommand
1087 | \r | |
1088 | ## Create makefile for the platform and modules in it\r | |
1089 | #\r | |
1090 | # @param CreateModuleMakeFile Flag indicating if the makefile for\r | |
1091 | # modules will be created as well\r | |
1092 | #\r | |
1093 | def CreateMakeFile(self, CreateModuleMakeFile=False, FfsCommand = {}):\r | |
1094 | if CreateModuleMakeFile:\r | |
1095 | for Ma in self._MaList:\r | |
1096 | key = (Ma.MetaFile.File, self.Arch)\r | |
1097 | if key in FfsCommand:\r | |
1098 | Ma.CreateMakeFile(True, FfsCommand[key])\r | |
1099 | else:\r | |
1100 | Ma.CreateMakeFile(True)\r | |
1101 | \r | |
1102 | # no need to create makefile for the platform more than once\r | |
1103 | if self.IsMakeFileCreated:\r | |
1104 | return\r | |
1105 | \r | |
1106 | # create library/module build dirs for platform\r | |
1107 | Makefile = GenMake.PlatformMakefile(self)\r | |
1108 | self.LibraryBuildDirectoryList = Makefile.GetLibraryBuildDirectoryList()\r | |
1109 | self.ModuleBuildDirectoryList = Makefile.GetModuleBuildDirectoryList()\r | |
1110 | \r | |
1111 | self.IsMakeFileCreated = True\r | |
1112 | \r | |
    ## Deal with Shared FixedAtBuild Pcds
    #
    #  For every library, decide which FixedAtBuild PCDs have the same value
    #  in every module that links the library; those can be emitted as
    #  constants (LibAuto.ConstPcd).
    #
    def CollectFixedAtBuildPcds(self):
        for LibAuto in self.LibraryAutoGenList:
            # key -> first value seen for this PCD across referencing modules
            FixedAtBuildPcds = {}
            # key -> True while every referencing module agrees on the value
            ShareFixedAtBuildPcdsSameValue = {}
            for Module in LibAuto.ReferenceModules:
                for Pcd in set(Module.FixedAtBuildPcds + LibAuto.FixedAtBuildPcds):
                    DefaultValue = Pcd.DefaultValue
                    # Cover the case: DSC component override the Pcd value and the Pcd only used in one Lib
                    if Pcd in Module.LibraryPcdList:
                        Index = Module.LibraryPcdList.index(Pcd)
                        DefaultValue = Module.LibraryPcdList[Index].DefaultValue
                    key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
                    if key not in FixedAtBuildPcds:
                        # First sighting: tentatively shared with one value.
                        ShareFixedAtBuildPcdsSameValue[key] = True
                        FixedAtBuildPcds[key] = DefaultValue
                    else:
                        # Any disagreement disqualifies the PCD as a constant.
                        if FixedAtBuildPcds[key] != DefaultValue:
                            ShareFixedAtBuildPcdsSameValue[key] = False
            for Pcd in LibAuto.FixedAtBuildPcds:
                key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
                # Only PCDs the DSC resolves to FixedAtBuild qualify.
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
                    continue
                else:
                    DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)]
                    if DscPcd.Type != TAB_PCDS_FIXED_AT_BUILD:
                        continue
                if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
                    LibAuto.ConstPcd[key] = FixedAtBuildPcds[key]
1143 | \r | |
    def CollectVariables(self, DynamicPcdSet):
        """Build a VariableMgr describing every HII variable backing a PCD in
        DynamicPcdSet, seeded with the VPD region geometry from the FDF.

        @param DynamicPcdSet  iterable of dynamic PCD objects with SkuInfoList
        @retval VariableMgr   populated variable manager
        """
        VpdRegionSize = 0
        VpdRegionBase = 0
        # Locate the VPD region in the current FD by matching the platform's
        # VPD tool GUID against the region data entries.
        if self.Workspace.FdfFile:
            FdDict = self.Workspace.FdfProfile.FdDict[GlobalData.gFdfParser.CurrentFdName]
            for FdRegion in FdDict.RegionList:
                for item in FdRegion.RegionDataList:
                    if self.Platform.VpdToolGuid.strip() and self.Platform.VpdToolGuid in item:
                        VpdRegionSize = FdRegion.Size
                        VpdRegionBase = FdRegion.Offset
                        break

        VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(), self.DscBuildDataObj.SkuIds)
        VariableInfo.SetVpdRegionMaxSize(VpdRegionSize)
        VariableInfo.SetVpdRegionOffset(VpdRegionBase)
        Index = 0
        for Pcd in DynamicPcdSet:
            pcdname = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
            for SkuName in Pcd.SkuInfoList:
                Sku = Pcd.SkuInfoList[SkuName]
                SkuId = Sku.SkuId
                if SkuId is None or SkuId == '':
                    continue
                # Only HII-backed skus (with a variable name) produce entries;
                # one entry per default store.
                if len(Sku.VariableName) > 0:
                    VariableGuidStructure = Sku.VariableGuidValue
                    VariableGuid = GuidStructureStringToGuidString(VariableGuidStructure)
                    for StorageName in Sku.DefaultStoreDict:
                        VariableInfo.append_variable(var_info(Index, pcdname, StorageName, SkuName, StringToArray(Sku.VariableName), VariableGuid, Sku.VariableOffset, Sku.VariableAttribute, Sku.HiiDefaultValue, Sku.DefaultStoreDict[StorageName], Pcd.DatumType, Pcd.CustomAttribute['DscPosition'], Pcd.CustomAttribute.get('IsStru',False)))
            Index += 1
        return VariableInfo
1174 | \r | |
1175 | def UpdateNVStoreMaxSize(self, OrgVpdFile):\r | |
1176 | if self.VariableInfo:\r | |
1177 | VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)\r | |
1178 | PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]\r | |
1179 | \r | |
1180 | if PcdNvStoreDfBuffer:\r | |
1181 | if os.path.exists(VpdMapFilePath):\r | |
1182 | OrgVpdFile.Read(VpdMapFilePath)\r | |
1183 | PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])\r | |
1184 | NvStoreOffset = PcdItems.values()[0].strip() if PcdItems else '0'\r | |
1185 | else:\r | |
1186 | EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)\r | |
1187 | \r | |
1188 | NvStoreOffset = int(NvStoreOffset, 16) if NvStoreOffset.upper().startswith("0X") else int(NvStoreOffset)\r | |
1189 | default_skuobj = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)\r | |
1190 | maxsize = self.VariableInfo.VpdRegionSize - NvStoreOffset if self.VariableInfo.VpdRegionSize else len(default_skuobj.DefaultValue.split(","))\r | |
1191 | var_data = self.VariableInfo.PatchNVStoreDefaultMaxSize(maxsize)\r | |
1192 | \r | |
1193 | if var_data and default_skuobj:\r | |
1194 | default_skuobj.DefaultValue = var_data\r | |
1195 | PcdNvStoreDfBuffer[0].DefaultValue = var_data\r | |
1196 | PcdNvStoreDfBuffer[0].SkuInfoList.clear()\r | |
1197 | PcdNvStoreDfBuffer[0].SkuInfoList[TAB_DEFAULT] = default_skuobj\r | |
1198 | PcdNvStoreDfBuffer[0].MaxDatumSize = str(len(default_skuobj.DefaultValue.split(",")))\r | |
1199 | \r | |
1200 | return OrgVpdFile\r | |
1201 | \r | |
1202 | ## Collect dynamic PCDs\r | |
1203 | #\r | |
1204 | # Gather dynamic PCDs list from each module and their settings from platform\r | |
1205 | # This interface should be invoked explicitly when platform action is created.\r | |
1206 | #\r | |
1207 | def CollectPlatformDynamicPcds(self):\r | |
1208 | for key in self.Platform.Pcds:\r | |
1209 | for SinglePcd in GlobalData.MixedPcd:\r | |
1210 | if (self.Platform.Pcds[key].TokenCName, self.Platform.Pcds[key].TokenSpaceGuidCName) == SinglePcd:\r | |
1211 | for item in GlobalData.MixedPcd[SinglePcd]:\r | |
1212 | Pcd_Type = item[0].split('_')[-1]\r | |
1213 | if (Pcd_Type == self.Platform.Pcds[key].Type) or (Pcd_Type == TAB_PCDS_DYNAMIC_EX and self.Platform.Pcds[key].Type in PCD_DYNAMIC_EX_TYPE_SET) or \\r | |
1214 | (Pcd_Type == TAB_PCDS_DYNAMIC and self.Platform.Pcds[key].Type in PCD_DYNAMIC_TYPE_SET):\r | |
1215 | Value = self.Platform.Pcds[key]\r | |
1216 | Value.TokenCName = self.Platform.Pcds[key].TokenCName + '_' + Pcd_Type\r | |
1217 | if len(key) == 2:\r | |
1218 | newkey = (Value.TokenCName, key[1])\r | |
1219 | elif len(key) == 3:\r | |
1220 | newkey = (Value.TokenCName, key[1], key[2])\r | |
1221 | del self.Platform.Pcds[key]\r | |
1222 | self.Platform.Pcds[newkey] = Value\r | |
1223 | break\r | |
1224 | break\r | |
1225 | \r | |
1226 | # for gathering error information\r | |
1227 | NoDatumTypePcdList = set()\r | |
1228 | FdfModuleList = []\r | |
1229 | for InfName in self._AsBuildInfList:\r | |
1230 | InfName = mws.join(self.WorkspaceDir, InfName)\r | |
1231 | FdfModuleList.append(os.path.normpath(InfName))\r | |
1232 | for M in self._MaList:\r | |
1233 | # F is the Module for which M is the module autogen\r | |
1234 | for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:\r | |
1235 | # make sure that the "VOID*" kind of datum has MaxDatumSize set\r | |
1236 | if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:\r | |
1237 | NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, M.MetaFile))\r | |
1238 | \r | |
1239 | # Check the PCD from Binary INF or Source INF\r | |
1240 | if M.IsBinaryModule == True:\r | |
1241 | PcdFromModule.IsFromBinaryInf = True\r | |
1242 | \r | |
1243 | # Check the PCD from DSC or not\r | |
1244 | PcdFromModule.IsFromDsc = (PcdFromModule.TokenCName, PcdFromModule.TokenSpaceGuidCName) in self.Platform.Pcds\r | |
1245 | \r | |
1246 | if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET or PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1247 | if M.MetaFile.Path not in FdfModuleList:\r | |
1248 | # If one of the Source built modules listed in the DSC is not listed\r | |
1249 | # in FDF modules, and the INF lists a PCD can only use the PcdsDynamic\r | |
1250 | # access method (it is only listed in the DEC file that declares the\r | |
1251 | # PCD as PcdsDynamic), then build tool will report warning message\r | |
1252 | # notify the PI that they are attempting to build a module that must\r | |
1253 | # be included in a flash image in order to be functional. These Dynamic\r | |
1254 | # PCD will not be added into the Database unless it is used by other\r | |
1255 | # modules that are included in the FDF file.\r | |
1256 | if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET and \\r | |
1257 | PcdFromModule.IsFromBinaryInf == False:\r | |
1258 | # Print warning message to let the developer make a determine.\r | |
1259 | continue\r | |
1260 | # If one of the Source built modules listed in the DSC is not listed in\r | |
1261 | # FDF modules, and the INF lists a PCD can only use the PcdsDynamicEx\r | |
1262 | # access method (it is only listed in the DEC file that declares the\r | |
1263 | # PCD as PcdsDynamicEx), then DO NOT break the build; DO NOT add the\r | |
1264 | # PCD to the Platform's PCD Database.\r | |
1265 | if PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1266 | continue\r | |
1267 | #\r | |
1268 | # If a dynamic PCD used by a PEM module/PEI module & DXE module,\r | |
1269 | # it should be stored in Pcd PEI database, If a dynamic only\r | |
1270 | # used by DXE module, it should be stored in DXE PCD database.\r | |
1271 | # The default Phase is DXE\r | |
1272 | #\r | |
1273 | if M.ModuleType in SUP_MODULE_SET_PEI:\r | |
1274 | PcdFromModule.Phase = "PEI"\r | |
1275 | if PcdFromModule not in self._DynaPcdList_:\r | |
1276 | self._DynaPcdList_.append(PcdFromModule)\r | |
1277 | elif PcdFromModule.Phase == 'PEI':\r | |
1278 | # overwrite any the same PCD existing, if Phase is PEI\r | |
1279 | Index = self._DynaPcdList_.index(PcdFromModule)\r | |
1280 | self._DynaPcdList_[Index] = PcdFromModule\r | |
1281 | elif PcdFromModule not in self._NonDynaPcdList_:\r | |
1282 | self._NonDynaPcdList_.append(PcdFromModule)\r | |
1283 | elif PcdFromModule in self._NonDynaPcdList_ and PcdFromModule.IsFromBinaryInf == True:\r | |
1284 | Index = self._NonDynaPcdList_.index(PcdFromModule)\r | |
1285 | if self._NonDynaPcdList_[Index].IsFromBinaryInf == False:\r | |
1286 | #The PCD from Binary INF will override the same one from source INF\r | |
1287 | self._NonDynaPcdList_.remove (self._NonDynaPcdList_[Index])\r | |
1288 | PcdFromModule.Pending = False\r | |
1289 | self._NonDynaPcdList_.append (PcdFromModule)\r | |
1290 | DscModuleSet = {os.path.normpath(ModuleInf.Path) for ModuleInf in self.Platform.Modules}\r | |
1291 | # add the PCD from modules that listed in FDF but not in DSC to Database\r | |
1292 | for InfName in FdfModuleList:\r | |
1293 | if InfName not in DscModuleSet:\r | |
1294 | InfClass = PathClass(InfName)\r | |
1295 | M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]\r | |
1296 | # If a module INF in FDF but not in current arch's DSC module list, it must be module (either binary or source)\r | |
1297 | # for different Arch. PCDs in source module for different Arch is already added before, so skip the source module here.\r | |
1298 | # For binary module, if in current arch, we need to list the PCDs into database.\r | |
1299 | if not M.IsBinaryModule:\r | |
1300 | continue\r | |
1301 | # Override the module PCD setting by platform setting\r | |
1302 | ModulePcdList = self.ApplyPcdSetting(M, M.Pcds)\r | |
1303 | for PcdFromModule in ModulePcdList:\r | |
1304 | PcdFromModule.IsFromBinaryInf = True\r | |
1305 | PcdFromModule.IsFromDsc = False\r | |
1306 | # Only allow the DynamicEx and Patchable PCD in AsBuild INF\r | |
1307 | if PcdFromModule.Type not in PCD_DYNAMIC_EX_TYPE_SET and PcdFromModule.Type not in TAB_PCDS_PATCHABLE_IN_MODULE:\r | |
1308 | EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",\r | |
1309 | File=self.MetaFile,\r | |
1310 | ExtraData="\n\tExisted %s PCD %s in:\n\t\t%s\n"\r | |
1311 | % (PcdFromModule.Type, PcdFromModule.TokenCName, InfName))\r | |
1312 | # make sure that the "VOID*" kind of datum has MaxDatumSize set\r | |
1313 | if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:\r | |
1314 | NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, InfName))\r | |
1315 | if M.ModuleType in SUP_MODULE_SET_PEI:\r | |
1316 | PcdFromModule.Phase = "PEI"\r | |
1317 | if PcdFromModule not in self._DynaPcdList_ and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1318 | self._DynaPcdList_.append(PcdFromModule)\r | |
1319 | elif PcdFromModule not in self._NonDynaPcdList_ and PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE:\r | |
1320 | self._NonDynaPcdList_.append(PcdFromModule)\r | |
1321 | if PcdFromModule in self._DynaPcdList_ and PcdFromModule.Phase == 'PEI' and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1322 | # Overwrite the phase of any the same PCD existing, if Phase is PEI.\r | |
1323 | # It is to solve the case that a dynamic PCD used by a PEM module/PEI\r | |
1324 | # module & DXE module at a same time.\r | |
1325 | # Overwrite the type of the PCDs in source INF by the type of AsBuild\r | |
1326 | # INF file as DynamicEx.\r | |
1327 | Index = self._DynaPcdList_.index(PcdFromModule)\r | |
1328 | self._DynaPcdList_[Index].Phase = PcdFromModule.Phase\r | |
1329 | self._DynaPcdList_[Index].Type = PcdFromModule.Type\r | |
1330 | for PcdFromModule in self._NonDynaPcdList_:\r | |
1331 | # If a PCD is not listed in the DSC file, but binary INF files used by\r | |
1332 | # this platform all (that use this PCD) list the PCD in a [PatchPcds]\r | |
1333 | # section, AND all source INF files used by this platform the build\r | |
1334 | # that use the PCD list the PCD in either a [Pcds] or [PatchPcds]\r | |
1335 | # section, then the tools must NOT add the PCD to the Platform's PCD\r | |
1336 | # Database; the build must assign the access method for this PCD as\r | |
1337 | # PcdsPatchableInModule.\r | |
1338 | if PcdFromModule not in self._DynaPcdList_:\r | |
1339 | continue\r | |
1340 | Index = self._DynaPcdList_.index(PcdFromModule)\r | |
1341 | if PcdFromModule.IsFromDsc == False and \\r | |
1342 | PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE and \\r | |
1343 | PcdFromModule.IsFromBinaryInf == True and \\r | |
1344 | self._DynaPcdList_[Index].IsFromBinaryInf == False:\r | |
1345 | Index = self._DynaPcdList_.index(PcdFromModule)\r | |
1346 | self._DynaPcdList_.remove (self._DynaPcdList_[Index])\r | |
1347 | \r | |
1348 | # print out error information and break the build, if error found\r | |
1349 | if len(NoDatumTypePcdList) > 0:\r | |
1350 | NoDatumTypePcdListString = "\n\t\t".join(NoDatumTypePcdList)\r | |
1351 | EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",\r | |
1352 | File=self.MetaFile,\r | |
1353 | ExtraData="\n\tPCD(s) without MaxDatumSize:\n\t\t%s\n"\r | |
1354 | % NoDatumTypePcdListString)\r | |
1355 | self._NonDynamicPcdList = self._NonDynaPcdList_\r | |
1356 | self._DynamicPcdList = self._DynaPcdList_\r | |
1357 | #\r | |
1358 | # Sort dynamic PCD list to:\r | |
1359 | # 1) If PCD's datum type is VOID* and value is unicode string which starts with L, the PCD item should\r | |
1360 | # try to be put header of dynamicd List\r | |
1361 | # 2) If PCD is HII type, the PCD item should be put after unicode type PCD\r | |
1362 | #\r | |
1363 | # The reason of sorting is make sure the unicode string is in double-byte alignment in string table.\r | |
1364 | #\r | |
1365 | UnicodePcdArray = set()\r | |
1366 | HiiPcdArray = set()\r | |
1367 | OtherPcdArray = set()\r | |
1368 | VpdPcdDict = {}\r | |
1369 | VpdFile = VpdInfoFile.VpdInfoFile()\r | |
1370 | NeedProcessVpdMapFile = False\r | |
1371 | \r | |
1372 | for pcd in self.Platform.Pcds:\r | |
1373 | if pcd not in self._PlatformPcds:\r | |
1374 | self._PlatformPcds[pcd] = self.Platform.Pcds[pcd]\r | |
1375 | \r | |
1376 | for item in self._PlatformPcds:\r | |
1377 | if self._PlatformPcds[item].DatumType and self._PlatformPcds[item].DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r | |
1378 | self._PlatformPcds[item].DatumType = TAB_VOID\r | |
1379 | \r | |
1380 | if (self.Workspace.ArchList[-1] == self.Arch):\r | |
1381 | for Pcd in self._DynamicPcdList:\r | |
1382 | # just pick the a value to determine whether is unicode string type\r | |
1383 | Sku = Pcd.SkuInfoList.values()[0]\r | |
1384 | Sku.VpdOffset = Sku.VpdOffset.strip()\r | |
1385 | \r | |
1386 | if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r | |
1387 | Pcd.DatumType = TAB_VOID\r | |
1388 | \r | |
1389 | # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex\r | |
1390 | # if found HII type PCD then insert to right of UnicodeIndex\r | |
1391 | if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:\r | |
1392 | VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd\r | |
1393 | \r | |
1394 | #Collect DynamicHii PCD values and assign it to DynamicExVpd PCD gEfiMdeModulePkgTokenSpaceGuid.PcdNvStoreDefaultValueBuffer\r | |
1395 | PcdNvStoreDfBuffer = VpdPcdDict.get(("PcdNvStoreDefaultValueBuffer", "gEfiMdeModulePkgTokenSpaceGuid"))\r | |
1396 | if PcdNvStoreDfBuffer:\r | |
1397 | self.VariableInfo = self.CollectVariables(self._DynamicPcdList)\r | |
1398 | vardump = self.VariableInfo.dump()\r | |
1399 | if vardump:\r | |
1400 | PcdNvStoreDfBuffer.DefaultValue = vardump\r | |
1401 | for skuname in PcdNvStoreDfBuffer.SkuInfoList:\r | |
1402 | PcdNvStoreDfBuffer.SkuInfoList[skuname].DefaultValue = vardump\r | |
1403 | PcdNvStoreDfBuffer.MaxDatumSize = str(len(vardump.split(",")))\r | |
1404 | else:\r | |
1405 | #If the end user define [DefaultStores] and [XXX.Menufacturing] in DSC, but forget to configure PcdNvStoreDefaultValueBuffer to PcdsDynamicVpd\r | |
1406 | if [Pcd for Pcd in self._DynamicPcdList if Pcd.UserDefinedDefaultStoresFlag]:\r | |
1407 | EdkLogger.warn("build", "PcdNvStoreDefaultValueBuffer should be defined as PcdsDynamicExVpd in dsc file since the DefaultStores is enabled for this platform.\n%s" %self.Platform.MetaFile.Path)\r | |
1408 | PlatformPcds = sorted(self._PlatformPcds.keys())\r | |
1409 | #\r | |
1410 | # Add VPD type PCD into VpdFile and determine whether the VPD PCD need to be fixed up.\r | |
1411 | #\r | |
1412 | VpdSkuMap = {}\r | |
1413 | for PcdKey in PlatformPcds:\r | |
1414 | Pcd = self._PlatformPcds[PcdKey]\r | |
1415 | if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD] and \\r | |
1416 | PcdKey in VpdPcdDict:\r | |
1417 | Pcd = VpdPcdDict[PcdKey]\r | |
1418 | SkuValueMap = {}\r | |
1419 | DefaultSku = Pcd.SkuInfoList.get(TAB_DEFAULT)\r | |
1420 | if DefaultSku:\r | |
1421 | PcdValue = DefaultSku.DefaultValue\r | |
1422 | if PcdValue not in SkuValueMap:\r | |
1423 | SkuValueMap[PcdValue] = []\r | |
1424 | VpdFile.Add(Pcd, TAB_DEFAULT, DefaultSku.VpdOffset)\r | |
1425 | SkuValueMap[PcdValue].append(DefaultSku)\r | |
1426 | \r | |
1427 | for (SkuName, Sku) in Pcd.SkuInfoList.items():\r | |
1428 | Sku.VpdOffset = Sku.VpdOffset.strip()\r | |
1429 | PcdValue = Sku.DefaultValue\r | |
1430 | if PcdValue == "":\r | |
1431 | PcdValue = Pcd.DefaultValue\r | |
1432 | if Sku.VpdOffset != '*':\r | |
1433 | if PcdValue.startswith("{"):\r | |
1434 | Alignment = 8\r | |
1435 | elif PcdValue.startswith("L"):\r | |
1436 | Alignment = 2\r | |
1437 | else:\r | |
1438 | Alignment = 1\r | |
1439 | try:\r | |
1440 | VpdOffset = int(Sku.VpdOffset)\r | |
1441 | except:\r | |
1442 | try:\r | |
1443 | VpdOffset = int(Sku.VpdOffset, 16)\r | |
1444 | except:\r | |
1445 | EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))\r | |
1446 | if VpdOffset % Alignment != 0:\r | |
1447 | if PcdValue.startswith("{"):\r | |
1448 | EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName), File=self.MetaFile)\r | |
1449 | else:\r | |
1450 | EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Alignment))\r | |
1451 | if PcdValue not in SkuValueMap:\r | |
1452 | SkuValueMap[PcdValue] = []\r | |
1453 | VpdFile.Add(Pcd, SkuName, Sku.VpdOffset)\r | |
1454 | SkuValueMap[PcdValue].append(Sku)\r | |
1455 | # if the offset of a VPD is *, then it need to be fixed up by third party tool.\r | |
1456 | if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":\r | |
1457 | NeedProcessVpdMapFile = True\r | |
1458 | if self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == '':\r | |
1459 | EdkLogger.error("Build", FILE_NOT_FOUND, \\r | |
1460 | "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r | |
1461 | \r | |
1462 | VpdSkuMap[PcdKey] = SkuValueMap\r | |
1463 | #\r | |
1464 | # Fix the PCDs define in VPD PCD section that never referenced by module.\r | |
1465 | # An example is PCD for signature usage.\r | |
1466 | #\r | |
1467 | for DscPcd in PlatformPcds:\r | |
1468 | DscPcdEntry = self._PlatformPcds[DscPcd]\r | |
1469 | if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:\r | |
1470 | if not (self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == ''):\r | |
1471 | FoundFlag = False\r | |
1472 | for VpdPcd in VpdFile._VpdArray:\r | |
1473 | # This PCD has been referenced by module\r | |
1474 | if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \\r | |
1475 | (VpdPcd.TokenCName == DscPcdEntry.TokenCName):\r | |
1476 | FoundFlag = True\r | |
1477 | \r | |
1478 | # Not found, it should be signature\r | |
1479 | if not FoundFlag :\r | |
1480 | # just pick the a value to determine whether is unicode string type\r | |
1481 | SkuValueMap = {}\r | |
1482 | SkuObjList = DscPcdEntry.SkuInfoList.items()\r | |
1483 | DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)\r | |
1484 | if DefaultSku:\r | |
1485 | defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))\r | |
1486 | SkuObjList[0], SkuObjList[defaultindex] = SkuObjList[defaultindex], SkuObjList[0]\r | |
1487 | for (SkuName, Sku) in SkuObjList:\r | |
1488 | Sku.VpdOffset = Sku.VpdOffset.strip()\r | |
1489 | \r | |
1490 | # Need to iterate DEC pcd information to get the value & datumtype\r | |
1491 | for eachDec in self.PackageList:\r | |
1492 | for DecPcd in eachDec.Pcds:\r | |
1493 | DecPcdEntry = eachDec.Pcds[DecPcd]\r | |
1494 | if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \\r | |
1495 | (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):\r | |
1496 | # Print warning message to let the developer make a determine.\r | |
1497 | EdkLogger.warn("build", "Unreferenced vpd pcd used!",\r | |
1498 | File=self.MetaFile, \\r | |
1499 | ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \\r | |
1500 | %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))\r | |
1501 | \r | |
1502 | DscPcdEntry.DatumType = DecPcdEntry.DatumType\r | |
1503 | DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue\r | |
1504 | DscPcdEntry.TokenValue = DecPcdEntry.TokenValue\r | |
1505 | DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]\r | |
1506 | # Only fix the value while no value provided in DSC file.\r | |
1507 | if not Sku.DefaultValue:\r | |
1508 | DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue\r | |
1509 | \r | |
1510 | if DscPcdEntry not in self._DynamicPcdList:\r | |
1511 | self._DynamicPcdList.append(DscPcdEntry)\r | |
1512 | Sku.VpdOffset = Sku.VpdOffset.strip()\r | |
1513 | PcdValue = Sku.DefaultValue\r | |
1514 | if PcdValue == "":\r | |
1515 | PcdValue = DscPcdEntry.DefaultValue\r | |
1516 | if Sku.VpdOffset != '*':\r | |
1517 | if PcdValue.startswith("{"):\r | |
1518 | Alignment = 8\r | |
1519 | elif PcdValue.startswith("L"):\r | |
1520 | Alignment = 2\r | |
1521 | else:\r | |
1522 | Alignment = 1\r | |
1523 | try:\r | |
1524 | VpdOffset = int(Sku.VpdOffset)\r | |
1525 | except:\r | |
1526 | try:\r | |
1527 | VpdOffset = int(Sku.VpdOffset, 16)\r | |
1528 | except:\r | |
1529 | EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName))\r | |
1530 | if VpdOffset % Alignment != 0:\r | |
1531 | if PcdValue.startswith("{"):\r | |
1532 | EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName), File=self.MetaFile)\r | |
1533 | else:\r | |
1534 | EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, Alignment))\r | |
1535 | if PcdValue not in SkuValueMap:\r | |
1536 | SkuValueMap[PcdValue] = []\r | |
1537 | VpdFile.Add(DscPcdEntry, SkuName, Sku.VpdOffset)\r | |
1538 | SkuValueMap[PcdValue].append(Sku)\r | |
1539 | if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":\r | |
1540 | NeedProcessVpdMapFile = True\r | |
1541 | if DscPcdEntry.DatumType == TAB_VOID and PcdValue.startswith("L"):\r | |
1542 | UnicodePcdArray.add(DscPcdEntry)\r | |
1543 | elif len(Sku.VariableName) > 0:\r | |
1544 | HiiPcdArray.add(DscPcdEntry)\r | |
1545 | else:\r | |
1546 | OtherPcdArray.add(DscPcdEntry)\r | |
1547 | \r | |
1548 | # if the offset of a VPD is *, then it need to be fixed up by third party tool.\r | |
1549 | VpdSkuMap[DscPcd] = SkuValueMap\r | |
1550 | if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \\r | |
1551 | VpdFile.GetCount() != 0:\r | |
1552 | EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,\r | |
1553 | "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))\r | |
1554 | \r | |
1555 | if VpdFile.GetCount() != 0:\r | |
1556 | \r | |
1557 | self.FixVpdOffset(VpdFile)\r | |
1558 | \r | |
1559 | self.FixVpdOffset(self.UpdateNVStoreMaxSize(VpdFile))\r | |
1560 | \r | |
1561 | # Process VPD map file generated by third party BPDG tool\r | |
1562 | if NeedProcessVpdMapFile:\r | |
1563 | VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)\r | |
1564 | if os.path.exists(VpdMapFilePath):\r | |
1565 | VpdFile.Read(VpdMapFilePath)\r | |
1566 | \r | |
1567 | # Fixup "*" offset\r | |
1568 | for pcd in VpdSkuMap:\r | |
1569 | vpdinfo = VpdFile.GetVpdInfo(pcd)\r | |
1570 | if vpdinfo is None:\r | |
1571 | # just pick the a value to determine whether is unicode string type\r | |
1572 | continue\r | |
1573 | for pcdvalue in VpdSkuMap[pcd]:\r | |
1574 | for sku in VpdSkuMap[pcd][pcdvalue]:\r | |
1575 | for item in vpdinfo:\r | |
1576 | if item[2] == pcdvalue:\r | |
1577 | sku.VpdOffset = item[1]\r | |
1578 | else:\r | |
1579 | EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)\r | |
1580 | \r | |
1581 | # Delete the DynamicPcdList At the last time enter into this function\r | |
1582 | for Pcd in self._DynamicPcdList:\r | |
1583 | # just pick the a value to determine whether is unicode string type\r | |
1584 | Sku = Pcd.SkuInfoList.values()[0]\r | |
1585 | Sku.VpdOffset = Sku.VpdOffset.strip()\r | |
1586 | \r | |
1587 | if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r | |
1588 | Pcd.DatumType = TAB_VOID\r | |
1589 | \r | |
1590 | PcdValue = Sku.DefaultValue\r | |
1591 | if Pcd.DatumType == TAB_VOID and PcdValue.startswith("L"):\r | |
1592 | # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex\r | |
1593 | UnicodePcdArray.add(Pcd)\r | |
1594 | elif len(Sku.VariableName) > 0:\r | |
1595 | # if found HII type PCD then insert to right of UnicodeIndex\r | |
1596 | HiiPcdArray.add(Pcd)\r | |
1597 | else:\r | |
1598 | OtherPcdArray.add(Pcd)\r | |
1599 | del self._DynamicPcdList[:]\r | |
1600 | self._DynamicPcdList.extend(list(UnicodePcdArray))\r | |
1601 | self._DynamicPcdList.extend(list(HiiPcdArray))\r | |
1602 | self._DynamicPcdList.extend(list(OtherPcdArray))\r | |
1603 | allskuset = [(SkuName, Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName, Sku) in pcd.SkuInfoList.items()]\r | |
1604 | for pcd in self._DynamicPcdList:\r | |
1605 | if len(pcd.SkuInfoList) == 1:\r | |
1606 | for (SkuName, SkuId) in allskuset:\r | |
1607 | if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0:\r | |
1608 | continue\r | |
1609 | pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])\r | |
1610 | pcd.SkuInfoList[SkuName].SkuId = SkuId\r | |
1611 | pcd.SkuInfoList[SkuName].SkuIdName = SkuName\r | |
1612 | self.AllPcdList = self._NonDynamicPcdList + self._DynamicPcdList\r | |
1613 | \r | |
    ## Write the VPD PCD layout to a text file and run the BPDG tool on it.
    #
    #  Serializes VpdFile into <BuildDir>/FV/<VpdToolGuid>.txt and, when the
    #  file content changed, invokes the third-party BPDG GUID tool (located
    #  via tools_def.txt by matching its GUID attribute against the platform's
    #  VPD_TOOL_GUID) to fix up '*' VPD offsets.
    #
    #   @param  VpdFile     VpdInfoFile object holding the VPD PCD entries
    #
    def FixVpdOffset(self, VpdFile ):
        FvPath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY)
        if not os.path.exists(FvPath):
            try:
                os.makedirs(FvPath)
            except:
                EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)

        VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)

        # VpdFile.Write() presumably returns truthy only when the file content
        # changed — TODO confirm; the BPDG tool is only invoked in that case.
        if VpdFile.Write(VpdFilePath):
            # retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.
            BPDGToolName = None
            for ToolDef in self.ToolDefinition.values():
                if TAB_GUID in ToolDef and ToolDef[TAB_GUID] == self.Platform.VpdToolGuid:
                    if "PATH" not in ToolDef:
                        EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
                    BPDGToolName = ToolDef["PATH"]
                    break
            # Call third party GUID BPDG tool.
            if BPDGToolName is not None:
                VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
            else:
                EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
1638 | \r | |
1639 | ## Return the platform build data object\r | |
1640 | @cached_property\r | |
1641 | def Platform(self):\r | |
1642 | return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r | |
1643 | \r | |
1644 | ## Return platform name\r | |
1645 | @cached_property\r | |
1646 | def Name(self):\r | |
1647 | return self.Platform.PlatformName\r | |
1648 | \r | |
1649 | ## Return the meta file GUID\r | |
1650 | @cached_property\r | |
1651 | def Guid(self):\r | |
1652 | return self.Platform.Guid\r | |
1653 | \r | |
1654 | ## Return the platform version\r | |
1655 | @cached_property\r | |
1656 | def Version(self):\r | |
1657 | return self.Platform.Version\r | |
1658 | \r | |
1659 | ## Return the FDF file name\r | |
1660 | @cached_property\r | |
1661 | def FdfFile(self):\r | |
1662 | if self.Workspace.FdfFile:\r | |
1663 | RetVal= mws.join(self.WorkspaceDir, self.Workspace.FdfFile)\r | |
1664 | else:\r | |
1665 | RetVal = ''\r | |
1666 | return RetVal\r | |
1667 | \r | |
    ## Return the build output directory platform specifies
    @cached_property
    def OutputDir(self):
        # Taken verbatim from the platform build data; may be absolute or
        # relative — BuildDir is responsible for resolving it.
        return self.Platform.OutputDirectory
1672 | \r | |
1673 | ## Return the directory to store all intermediate and final files built\r | |
1674 | @cached_property\r | |
1675 | def BuildDir(self):\r | |
1676 | if os.path.isabs(self.OutputDir):\r | |
1677 | GlobalData.gBuildDirectory = RetVal = path.join(\r | |
1678 | path.abspath(self.OutputDir),\r | |
1679 | self.BuildTarget + "_" + self.ToolChain,\r | |
1680 | )\r | |
1681 | else:\r | |
1682 | GlobalData.gBuildDirectory = RetVal = path.join(\r | |
1683 | self.WorkspaceDir,\r | |
1684 | self.OutputDir,\r | |
1685 | self.BuildTarget + "_" + self.ToolChain,\r | |
1686 | )\r | |
1687 | return RetVal\r | |
1688 | \r | |
    ## Return directory of platform makefile
    #
    #   @retval     string  Makefile directory
    #
    @cached_property
    def MakeFileDir(self):
        # Per-arch subdirectory of the platform build directory.
        return path.join(self.BuildDir, self.Arch)
1696 | \r | |
1697 | ## Return build command string\r | |
1698 | #\r | |
1699 | # @retval string Build command string\r | |
1700 | #\r | |
1701 | @cached_property\r | |
1702 | def BuildCommand(self):\r | |
1703 | RetVal = []\r | |
1704 | if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:\r | |
1705 | RetVal += SplitOption(self.ToolDefinition["MAKE"]["PATH"])\r | |
1706 | if "FLAGS" in self.ToolDefinition["MAKE"]:\r | |
1707 | NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip()\r | |
1708 | if NewOption != '':\r | |
1709 | RetVal += SplitOption(NewOption)\r | |
1710 | if "MAKE" in self.EdkIIBuildOption:\r | |
1711 | if "FLAGS" in self.EdkIIBuildOption["MAKE"]:\r | |
1712 | Flags = self.EdkIIBuildOption["MAKE"]["FLAGS"]\r | |
1713 | if Flags.startswith('='):\r | |
1714 | RetVal = [RetVal[0]] + [Flags[1:]]\r | |
1715 | else:\r | |
1716 | RetVal.append(Flags)\r | |
1717 | return RetVal\r | |
1718 | \r | |
1719 | ## Get tool chain definition\r | |
1720 | #\r | |
1721 | # Get each tool defition for given tool chain from tools_def.txt and platform\r | |
1722 | #\r | |
1723 | @cached_property\r | |
1724 | def ToolDefinition(self):\r | |
1725 | ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary\r | |
1726 | if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:\r | |
1727 | EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",\r | |
1728 | ExtraData="[%s]" % self.MetaFile)\r | |
1729 | RetVal = {}\r | |
1730 | DllPathList = set()\r | |
1731 | for Def in ToolDefinition:\r | |
1732 | Target, Tag, Arch, Tool, Attr = Def.split("_")\r | |
1733 | if Target != self.BuildTarget or Tag != self.ToolChain or Arch != self.Arch:\r | |
1734 | continue\r | |
1735 | \r | |
1736 | Value = ToolDefinition[Def]\r | |
1737 | # don't record the DLL\r | |
1738 | if Attr == "DLL":\r | |
1739 | DllPathList.add(Value)\r | |
1740 | continue\r | |
1741 | \r | |
1742 | if Tool not in RetVal:\r | |
1743 | RetVal[Tool] = {}\r | |
1744 | RetVal[Tool][Attr] = Value\r | |
1745 | \r | |
1746 | ToolsDef = ''\r | |
1747 | if GlobalData.gOptions.SilentMode and "MAKE" in RetVal:\r | |
1748 | if "FLAGS" not in RetVal["MAKE"]:\r | |
1749 | RetVal["MAKE"]["FLAGS"] = ""\r | |
1750 | RetVal["MAKE"]["FLAGS"] += " -s"\r | |
1751 | MakeFlags = ''\r | |
1752 | for Tool in RetVal:\r | |
1753 | for Attr in RetVal[Tool]:\r | |
1754 | Value = RetVal[Tool][Attr]\r | |
1755 | if Tool in self._BuildOptionWithToolDef(RetVal) and Attr in self._BuildOptionWithToolDef(RetVal)[Tool]:\r | |
1756 | # check if override is indicated\r | |
1757 | if self._BuildOptionWithToolDef(RetVal)[Tool][Attr].startswith('='):\r | |
1758 | Value = self._BuildOptionWithToolDef(RetVal)[Tool][Attr][1:]\r | |
1759 | else:\r | |
1760 | if Attr != 'PATH':\r | |
1761 | Value += " " + self._BuildOptionWithToolDef(RetVal)[Tool][Attr]\r | |
1762 | else:\r | |
1763 | Value = self._BuildOptionWithToolDef(RetVal)[Tool][Attr]\r | |
1764 | \r | |
1765 | if Attr == "PATH":\r | |
1766 | # Don't put MAKE definition in the file\r | |
1767 | if Tool != "MAKE":\r | |
1768 | ToolsDef += "%s = %s\n" % (Tool, Value)\r | |
1769 | elif Attr != "DLL":\r | |
1770 | # Don't put MAKE definition in the file\r | |
1771 | if Tool == "MAKE":\r | |
1772 | if Attr == "FLAGS":\r | |
1773 | MakeFlags = Value\r | |
1774 | else:\r | |
1775 | ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)\r | |
1776 | ToolsDef += "\n"\r | |
1777 | \r | |
1778 | SaveFileOnChange(self.ToolDefinitionFile, ToolsDef)\r | |
1779 | for DllPath in DllPathList:\r | |
1780 | os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]\r | |
1781 | os.environ["MAKE_FLAGS"] = MakeFlags\r | |
1782 | \r | |
1783 | return RetVal\r | |
1784 | \r | |
1785 | ## Return the paths of tools\r | |
1786 | @cached_property\r | |
1787 | def ToolDefinitionFile(self):\r | |
1788 | return os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)\r | |
1789 | \r | |
1790 | ## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'.\r | |
1791 | @cached_property\r | |
1792 | def ToolChainFamily(self):\r | |
1793 | ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase\r | |
1794 | if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \\r | |
1795 | or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \\r | |
1796 | or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]:\r | |
1797 | EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \\r | |
1798 | % self.ToolChain)\r | |
1799 | RetVal = TAB_COMPILER_MSFT\r | |
1800 | else:\r | |
1801 | RetVal = ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]\r | |
1802 | return RetVal\r | |
1803 | \r | |
1804 | @cached_property\r | |
1805 | def BuildRuleFamily(self):\r | |
1806 | ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase\r | |
1807 | if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \\r | |
1808 | or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \\r | |
1809 | or not ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]:\r | |
1810 | EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \\r | |
1811 | % self.ToolChain)\r | |
1812 | return TAB_COMPILER_MSFT\r | |
1813 | \r | |
1814 | return ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]\r | |
1815 | \r | |
1816 | ## Return the build options specific for all modules in this platform\r | |
1817 | @cached_property\r | |
1818 | def BuildOption(self):\r | |
1819 | return self._ExpandBuildOption(self.Platform.BuildOptions)\r | |
1820 | \r | |
    ## Expand platform build options using the supplied tool-definition
    #  dictionary instead of the cached self.ToolDefinition (used while
    #  ToolDefinition itself is still being constructed).
    def _BuildOptionWithToolDef(self, ToolDef):
        return self._ExpandBuildOption(self.Platform.BuildOptions, ToolDef=ToolDef)
1823 | \r | |
1824 | ## Return the build options specific for EDK modules in this platform\r | |
1825 | @cached_property\r | |
1826 | def EdkBuildOption(self):\r | |
1827 | return self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)\r | |
1828 | \r | |
1829 | ## Return the build options specific for EDKII modules in this platform\r | |
1830 | @cached_property\r | |
1831 | def EdkIIBuildOption(self):\r | |
1832 | return self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)\r | |
1833 | \r | |
1834 | ## Parse build_rule.txt in Conf Directory.\r | |
1835 | #\r | |
1836 | # @retval BuildRule object\r | |
1837 | #\r | |
1838 | @cached_property\r | |
1839 | def BuildRule(self):\r | |
1840 | BuildRuleFile = None\r | |
1841 | if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:\r | |
1842 | BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]\r | |
1843 | if not BuildRuleFile:\r | |
1844 | BuildRuleFile = gDefaultBuildRuleFile\r | |
1845 | RetVal = BuildRule(BuildRuleFile)\r | |
1846 | if RetVal._FileVersion == "":\r | |
1847 | RetVal._FileVersion = AutoGenReqBuildRuleVerNum\r | |
1848 | else:\r | |
1849 | if RetVal._FileVersion < AutoGenReqBuildRuleVerNum :\r | |
1850 | # If Build Rule's version is less than the version number required by the tools, halting the build.\r | |
1851 | EdkLogger.error("build", AUTOGEN_ERROR,\r | |
1852 | ExtraData="The version number [%s] of build_rule.txt is less than the version number required by the AutoGen.(the minimum required version number is [%s])"\\r | |
1853 | % (RetVal._FileVersion, AutoGenReqBuildRuleVerNum))\r | |
1854 | return RetVal\r | |
1855 | \r | |
1856 | ## Summarize the packages used by modules in this platform\r | |
1857 | @cached_property\r | |
1858 | def PackageList(self):\r | |
1859 | RetVal = set()\r | |
1860 | for La in self.LibraryAutoGenList:\r | |
1861 | RetVal.update(La.DependentPackageList)\r | |
1862 | for Ma in self.ModuleAutoGenList:\r | |
1863 | RetVal.update(Ma.DependentPackageList)\r | |
1864 | #Collect package set information from INF of FDF\r | |
1865 | for ModuleFile in self._AsBuildModuleList:\r | |
1866 | if ModuleFile in self.Platform.Modules:\r | |
1867 | continue\r | |
1868 | ModuleData = self.BuildDatabase[ModuleFile, self.Arch, self.BuildTarget, self.ToolChain]\r | |
1869 | RetVal.update(ModuleData.Packages)\r | |
1870 | return list(RetVal)\r | |
1871 | \r | |
1872 | @cached_property\r | |
1873 | def NonDynamicPcdDict(self):\r | |
1874 | return {(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):Pcd for Pcd in self.NonDynamicPcdList}\r | |
1875 | \r | |
    ## Get list of non-dynamic PCDs
    @cached_property
    def NonDynamicPcdList(self):
        # Classification of platform PCDs happens lazily on first access;
        # CollectPlatformDynamicPcds() fills _NonDynamicPcdList as a side effect.
        self.CollectPlatformDynamicPcds()
        return self._NonDynamicPcdList
1881 | \r | |
    ## Get list of dynamic PCDs
    @cached_property
    def DynamicPcdList(self):
        # Classification of platform PCDs happens lazily on first access;
        # CollectPlatformDynamicPcds() fills _DynamicPcdList as a side effect.
        self.CollectPlatformDynamicPcds()
        return self._DynamicPcdList
1887 | \r | |
1888 | ## Generate Token Number for all PCD\r | |
1889 | @cached_property\r | |
1890 | def PcdTokenNumber(self):\r | |
1891 | RetVal = OrderedDict()\r | |
1892 | TokenNumber = 1\r | |
1893 | #\r | |
1894 | # Make the Dynamic and DynamicEx PCD use within different TokenNumber area.\r | |
1895 | # Such as:\r | |
1896 | #\r | |
1897 | # Dynamic PCD:\r | |
1898 | # TokenNumber 0 ~ 10\r | |
1899 | # DynamicEx PCD:\r | |
1900 | # TokeNumber 11 ~ 20\r | |
1901 | #\r | |
1902 | for Pcd in self.DynamicPcdList:\r | |
1903 | if Pcd.Phase == "PEI" and Pcd.Type in PCD_DYNAMIC_TYPE_SET:\r | |
1904 | EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r | |
1905 | RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r | |
1906 | TokenNumber += 1\r | |
1907 | \r | |
1908 | for Pcd in self.DynamicPcdList:\r | |
1909 | if Pcd.Phase == "PEI" and Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1910 | EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r | |
1911 | RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r | |
1912 | TokenNumber += 1\r | |
1913 | \r | |
1914 | for Pcd in self.DynamicPcdList:\r | |
1915 | if Pcd.Phase == "DXE" and Pcd.Type in PCD_DYNAMIC_TYPE_SET:\r | |
1916 | EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r | |
1917 | RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r | |
1918 | TokenNumber += 1\r | |
1919 | \r | |
1920 | for Pcd in self.DynamicPcdList:\r | |
1921 | if Pcd.Phase == "DXE" and Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1922 | EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r | |
1923 | RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r | |
1924 | TokenNumber += 1\r | |
1925 | \r | |
1926 | for Pcd in self.NonDynamicPcdList:\r | |
1927 | RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r | |
1928 | TokenNumber += 1\r | |
1929 | return RetVal\r | |
1930 | \r | |
1931 | @cached_property\r | |
1932 | def _MaList(self):\r | |
1933 | for ModuleFile in self.Platform.Modules:\r | |
1934 | Ma = ModuleAutoGen(\r | |
1935 | self.Workspace,\r | |
1936 | ModuleFile,\r | |
1937 | self.BuildTarget,\r | |
1938 | self.ToolChain,\r | |
1939 | self.Arch,\r | |
1940 | self.MetaFile\r | |
1941 | )\r | |
1942 | self.Platform.Modules[ModuleFile].M = Ma\r | |
1943 | return [x.M for x in self.Platform.Modules.values()]\r | |
1944 | \r | |
1945 | ## Summarize ModuleAutoGen objects of all modules to be built for this platform\r | |
1946 | @cached_property\r | |
1947 | def ModuleAutoGenList(self):\r | |
1948 | RetVal = []\r | |
1949 | for Ma in self._MaList:\r | |
1950 | if Ma not in RetVal:\r | |
1951 | RetVal.append(Ma)\r | |
1952 | return RetVal\r | |
1953 | \r | |
1954 | ## Summarize ModuleAutoGen objects of all libraries to be built for this platform\r | |
1955 | @cached_property\r | |
1956 | def LibraryAutoGenList(self):\r | |
1957 | RetVal = []\r | |
1958 | for Ma in self._MaList:\r | |
1959 | for La in Ma.LibraryAutoGenList:\r | |
1960 | if La not in RetVal:\r | |
1961 | RetVal.append(La)\r | |
1962 | if Ma not in La.ReferenceModules:\r | |
1963 | La.ReferenceModules.append(Ma)\r | |
1964 | return RetVal\r | |
1965 | \r | |
1966 | ## Test if a module is supported by the platform\r | |
1967 | #\r | |
1968 | # An error will be raised directly if the module or its arch is not supported\r | |
1969 | # by the platform or current configuration\r | |
1970 | #\r | |
1971 | def ValidModule(self, Module):\r | |
1972 | return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances \\r | |
1973 | or Module in self._AsBuildModuleList\r | |
1974 | \r | |
1975 | ## Resolve the library classes in a module to library instances\r | |
1976 | #\r | |
1977 | # This method will not only resolve library classes but also sort the library\r | |
1978 | # instances according to the dependency-ship.\r | |
1979 | #\r | |
1980 | # @param Module The module from which the library classes will be resolved\r | |
1981 | #\r | |
1982 | # @retval library_list List of library instances sorted\r | |
1983 | #\r | |
1984 | def ApplyLibraryInstance(self, Module):\r | |
1985 | # Cover the case that the binary INF file is list in the FDF file but not DSC file, return empty list directly\r | |
1986 | if str(Module) not in self.Platform.Modules:\r | |
1987 | return []\r | |
1988 | \r | |
1989 | return GetModuleLibInstances(Module,\r | |
1990 | self.Platform,\r | |
1991 | self.BuildDatabase,\r | |
1992 | self.Arch,\r | |
1993 | self.BuildTarget,\r | |
1994 | self.ToolChain,\r | |
1995 | self.MetaFile,\r | |
1996 | EdkLogger)\r | |
1997 | \r | |
    ## Override PCD setting (type, value, ...)
    #
    #   Merge the platform-level PCD declaration (FromPcd) into the module-level
    #   one (ToPcd) in place, validating type compatibility and the resulting
    #   default value, then apply MaxDatumSize and default-SKU fix-ups.
    #
    #   @param ToPcd    The PCD to be overridden (modified in place)
    #   @param FromPcd  The PCD to take overriding settings from; may be None,
    #                   in which case only the trailing fix-ups run
    #   @param Module   Module identification, used in error messages only
    #   @param Msg      Text describing where FromPcd came from (error messages)
    #   @param Library  Optional library file the PCD is used in (error messages)
    #
    def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):
        #
        # in case there's PCDs coming from FDF file, which have no type given.
        # at this point, ToPcd.Type has the type found from dependent
        # package
        #
        # For "mixed" PCDs (same PCD used with several types) report the
        # original token name, not the type-mangled one.
        TokenCName = ToPcd.TokenCName
        for PcdItem in GlobalData.MixedPcd:
            if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                TokenCName = PcdItem[0]
                break
        if FromPcd is not None:
            if ToPcd.Pending and FromPcd.Type:
                # Type not yet settled (e.g. PCD came from the FDF): adopt it.
                ToPcd.Type = FromPcd.Type
            elif ToPcd.Type and FromPcd.Type\
                and ToPcd.Type != FromPcd.Type and ToPcd.Type in FromPcd.Type:
                # NOTE(review): substring containment — e.g. "DynamicEx" in
                # "DynamicExDefault"; only the DynamicEx case is promoted.
                if ToPcd.Type.strip() == TAB_PCDS_DYNAMIC_EX:
                    ToPcd.Type = FromPcd.Type
            elif ToPcd.Type and FromPcd.Type \
                and ToPcd.Type != FromPcd.Type:
                # Genuinely conflicting types: hard error.
                if Library:
                    Module = str(Module) + " 's library file (" + str(Library) + ")"
                EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
                                ExtraData="%s.%s is used as [%s] in module %s, but as [%s] in %s."\
                                          % (ToPcd.TokenSpaceGuidCName, TokenCName,
                                             ToPcd.Type, Module, FromPcd.Type, Msg),
                                File=self.MetaFile)

            # Copy over every non-empty attribute of the overriding PCD.
            if FromPcd.MaxDatumSize:
                ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
                ToPcd.MaxSizeUserSet = FromPcd.MaxDatumSize
            if FromPcd.DefaultValue:
                ToPcd.DefaultValue = FromPcd.DefaultValue
            if FromPcd.TokenValue:
                ToPcd.TokenValue = FromPcd.TokenValue
            if FromPcd.DatumType:
                ToPcd.DatumType = FromPcd.DatumType
            if FromPcd.SkuInfoList:
                ToPcd.SkuInfoList = FromPcd.SkuInfoList
            if FromPcd.UserDefinedDefaultStoresFlag:
                ToPcd.UserDefinedDefaultStoresFlag = FromPcd.UserDefinedDefaultStoresFlag
            # Add Flexible PCD format parse
            if ToPcd.DefaultValue:
                try:
                    ToPcd.DefaultValue = ValueExpressionEx(ToPcd.DefaultValue, ToPcd.DatumType, self.Workspace._GuidDict)(True)
                except BadExpression as Value:
                    EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, ToPcd.DefaultValue, Value),
                                    File=self.MetaFile)

            # check the validation of datum
            IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
            if not IsValid:
                EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
                                ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, TokenCName))
            ToPcd.validateranges = FromPcd.validateranges
            ToPcd.validlists = FromPcd.validlists
            ToPcd.expressions = FromPcd.expressions
            ToPcd.CustomAttribute = FromPcd.CustomAttribute

        # Derive MaxDatumSize for VOID* PCDs from the default value when the
        # platform did not specify one.
        if FromPcd is not None and ToPcd.DatumType == TAB_VOID and not ToPcd.MaxDatumSize:
            EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
                            % (ToPcd.TokenSpaceGuidCName, TokenCName))
            Value = ToPcd.DefaultValue
            if not Value:
                ToPcd.MaxDatumSize = '1'
            elif Value[0] == 'L':
                # L"..." unicode string: two bytes per remaining character
                ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
            elif Value[0] == '{':
                # {0x..,0x..} byte array: one byte per comma-separated element
                ToPcd.MaxDatumSize = str(len(Value.split(',')))
            else:
                # ASCII string literal: length minus one (quote accounting)
                ToPcd.MaxDatumSize = str(len(Value) - 1)

        # apply default SKU for dynamic PCDS if specified one is not available
        if (ToPcd.Type in PCD_DYNAMIC_TYPE_SET or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_SET) \
            and not ToPcd.SkuInfoList:
            if self.Platform.SkuName in self.Platform.SkuIds:
                SkuName = self.Platform.SkuName
            else:
                SkuName = TAB_DEFAULT
            ToPcd.SkuInfoList = {
                SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue)
            }
2085 | \r | |
    ## Apply PCD settings defined in the platform to a module
    #
    # @param Module   The module whose PCDs will be overridden
    # @param Pcds     Mapping (TokenCName, TokenSpaceGuidCName) -> PCD object;
    #                 entries are modified in place
    # @param Library  Optional library file name, used in error messages
    #
    # @retval PCD_list The PCDs with settings from the platform applied
    #
    def ApplyPcdSetting(self, Module, Pcds, Library=""):
        # for each PCD in module
        for Name, Guid in Pcds:
            PcdInModule = Pcds[Name, Guid]
            # find out the PCD setting in platform
            if (Name, Guid) in self.Platform.Pcds:
                PcdInPlatform = self.Platform.Pcds[Name, Guid]
            else:
                PcdInPlatform = None
            # then override the settings if any
            self._OverridePcd(PcdInModule, PcdInPlatform, Module, Msg="DSC PCD sections", Library=Library)
            # resolve the VariableGuid value for each SKU (HII variable PCDs
            # carry a GUID name that must resolve against the package list)
            for SkuId in PcdInModule.SkuInfoList:
                Sku = PcdInModule.SkuInfoList[SkuId]
                if Sku.VariableGuid == '': continue
                Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
                if Sku.VariableGuidValue is None:
                    PackageList = "\n\t".join(str(P) for P in self.PackageList)
                    EdkLogger.error(
                        'build',
                        RESOURCE_NOT_AVAILABLE,
                        "Value of GUID [%s] is not found in" % Sku.VariableGuid,
                        ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
                                  % (Guid, Name, str(Module)),
                        File=self.MetaFile
                    )

        # override PCD settings with module specific setting
        # (DSC [Components] scoped <Pcd*> sections take precedence over the
        # global DSC PCD sections applied above)
        if Module in self.Platform.Modules:
            # NOTE(review): membership test uses Module but the lookup key is
            # str(Module) — presumably the mapping accepts both; confirm.
            PlatformModule = self.Platform.Modules[str(Module)]
            for Key in PlatformModule.Pcds:
                Flag = False
                if Key in Pcds:
                    ToPcd = Pcds[Key]
                    Flag = True
                elif Key in GlobalData.MixedPcd:
                    # the module may reference the PCD under a type-mangled name
                    for PcdItem in GlobalData.MixedPcd[Key]:
                        if PcdItem in Pcds:
                            ToPcd = Pcds[PcdItem]
                            Flag = True
                            break
                if Flag:
                    self._OverridePcd(ToPcd, PlatformModule.Pcds[Key], Module, Msg="DSC Components Module scoped PCD section", Library=Library)
        # use PCD value to calculate the MaxDatumSize when it is not specified
        for Name, Guid in Pcds:
            Pcd = Pcds[Name, Guid]
            if Pcd.DatumType == TAB_VOID and not Pcd.MaxDatumSize:
                Pcd.MaxSizeUserSet = None
                Value = Pcd.DefaultValue
                if not Value:
                    Pcd.MaxDatumSize = '1'
                elif Value[0] == 'L':
                    # L"..." unicode string: two bytes per remaining character
                    Pcd.MaxDatumSize = str((len(Value) - 2) * 2)
                elif Value[0] == '{':
                    # {..} byte array: one byte per comma-separated element
                    Pcd.MaxDatumSize = str(len(Value.split(',')))
                else:
                    Pcd.MaxDatumSize = str(len(Value) - 1)
        return Pcds.values()
2150 | \r | |
    ## Resolve library names to library modules
    #
    # (for legacy Edk.x modules, which reference libraries by name rather
    # than by library class)
    #
    # @param Module The module from which the library names will be resolved
    #
    # @retval library_list The list of library modules, in stack (LIFO)
    #                      discovery order
    #
    def ResolveLibraryReference(self, Module):
        EdkLogger.verbose("")
        EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), self.Arch))
        LibraryConsumerList = [Module]

        # "CompilerStub" is a must for Edk modules
        if Module.Libraries:
            Module.Libraries.append("CompilerStub")
        LibraryList = []
        # Depth-first traversal: each popped consumer contributes its
        # libraries, which are themselves queued for resolution.
        while len(LibraryConsumerList) > 0:
            M = LibraryConsumerList.pop()
            for LibraryName in M.Libraries:
                Library = self.Platform.LibraryClasses[LibraryName, ':dummy:']
                if Library is None:
                    # Fall back to a case-insensitive match on the class name
                    for Key in self.Platform.LibraryClasses.data:
                        if LibraryName.upper() == Key.upper():
                            Library = self.Platform.LibraryClasses[Key, ':dummy:']
                            break
                if Library is None:
                    # Missing library is a warning, not an error, for Edk.x
                    EdkLogger.warn("build", "Library [%s] is not found" % LibraryName, File=str(M),
                                   ExtraData="\t%s [%s]" % (str(Module), self.Arch))
                    continue

                if Library not in LibraryList:
                    LibraryList.append(Library)
                    LibraryConsumerList.append(Library)
                    EdkLogger.verbose("\t" + LibraryName + " : " + str(Library) + ' ' + str(type(Library)))
        return LibraryList
2187 | \r | |
2188 | ## Calculate the priority value of the build option\r | |
2189 | #\r | |
2190 | # @param Key Build option definition contain: TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE\r | |
2191 | #\r | |
2192 | # @retval Value Priority value based on the priority list.\r | |
2193 | #\r | |
2194 | def CalculatePriorityValue(self, Key):\r | |
2195 | Target, ToolChain, Arch, CommandType, Attr = Key.split('_')\r | |
2196 | PriorityValue = 0x11111\r | |
2197 | if Target == "*":\r | |
2198 | PriorityValue &= 0x01111\r | |
2199 | if ToolChain == "*":\r | |
2200 | PriorityValue &= 0x10111\r | |
2201 | if Arch == "*":\r | |
2202 | PriorityValue &= 0x11011\r | |
2203 | if CommandType == "*":\r | |
2204 | PriorityValue &= 0x11101\r | |
2205 | if Attr == "*":\r | |
2206 | PriorityValue &= 0x11110\r | |
2207 | \r | |
2208 | return self.PrioList["0x%0.5x" % PriorityValue]\r | |
2209 | \r | |
2210 | \r | |
2211 | ## Expand * in build option key\r | |
2212 | #\r | |
2213 | # @param Options Options to be expanded\r | |
2214 | # @param ToolDef Use specified ToolDef instead of full version.\r | |
2215 | # This is needed during initialization to prevent\r | |
2216 | # infinite recursion betweeh BuildOptions,\r | |
2217 | # ToolDefinition, and this function.\r | |
2218 | #\r | |
2219 | # @retval options Options expanded\r | |
2220 | #\r | |
2221 | def _ExpandBuildOption(self, Options, ModuleStyle=None, ToolDef=None):\r | |
2222 | if not ToolDef:\r | |
2223 | ToolDef = self.ToolDefinition\r | |
2224 | BuildOptions = {}\r | |
2225 | FamilyMatch = False\r | |
2226 | FamilyIsNull = True\r | |
2227 | \r | |
2228 | OverrideList = {}\r | |
2229 | #\r | |
2230 | # Construct a list contain the build options which need override.\r | |
2231 | #\r | |
2232 | for Key in Options:\r | |
2233 | #\r | |
2234 | # Key[0] -- tool family\r | |
2235 | # Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE\r | |
2236 | #\r | |
2237 | if (Key[0] == self.BuildRuleFamily and\r | |
2238 | (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):\r | |
2239 | Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')\r | |
2240 | if (Target == self.BuildTarget or Target == "*") and\\r | |
2241 | (ToolChain == self.ToolChain or ToolChain == "*") and\\r | |
2242 | (Arch == self.Arch or Arch == "*") and\\r | |
2243 | Options[Key].startswith("="):\r | |
2244 | \r | |
2245 | if OverrideList.get(Key[1]) is not None:\r | |
2246 | OverrideList.pop(Key[1])\r | |
2247 | OverrideList[Key[1]] = Options[Key]\r | |
2248 | \r | |
2249 | #\r | |
2250 | # Use the highest priority value.\r | |
2251 | #\r | |
2252 | if (len(OverrideList) >= 2):\r | |
2253 | KeyList = OverrideList.keys()\r | |
2254 | for Index in range(len(KeyList)):\r | |
2255 | NowKey = KeyList[Index]\r | |
2256 | Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")\r | |
2257 | for Index1 in range(len(KeyList) - Index - 1):\r | |
2258 | NextKey = KeyList[Index1 + Index + 1]\r | |
2259 | #\r | |
2260 | # Compare two Key, if one is included by another, choose the higher priority one\r | |
2261 | #\r | |
2262 | Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")\r | |
2263 | if (Target1 == Target2 or Target1 == "*" or Target2 == "*") and\\r | |
2264 | (ToolChain1 == ToolChain2 or ToolChain1 == "*" or ToolChain2 == "*") and\\r | |
2265 | (Arch1 == Arch2 or Arch1 == "*" or Arch2 == "*") and\\r | |
2266 | (CommandType1 == CommandType2 or CommandType1 == "*" or CommandType2 == "*") and\\r | |
2267 | (Attr1 == Attr2 or Attr1 == "*" or Attr2 == "*"):\r | |
2268 | \r | |
2269 | if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):\r | |
2270 | if Options.get((self.BuildRuleFamily, NextKey)) is not None:\r | |
2271 | Options.pop((self.BuildRuleFamily, NextKey))\r | |
2272 | else:\r | |
2273 | if Options.get((self.BuildRuleFamily, NowKey)) is not None:\r | |
2274 | Options.pop((self.BuildRuleFamily, NowKey))\r | |
2275 | \r | |
2276 | for Key in Options:\r | |
2277 | if ModuleStyle is not None and len (Key) > 2:\r | |
2278 | # Check Module style is EDK or EDKII.\r | |
2279 | # Only append build option for the matched style module.\r | |
2280 | if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:\r | |
2281 | continue\r | |
2282 | elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:\r | |
2283 | continue\r | |
2284 | Family = Key[0]\r | |
2285 | Target, Tag, Arch, Tool, Attr = Key[1].split("_")\r | |
2286 | # if tool chain family doesn't match, skip it\r | |
2287 | if Tool in ToolDef and Family != "":\r | |
2288 | FamilyIsNull = False\r | |
2289 | if ToolDef[Tool].get(TAB_TOD_DEFINES_BUILDRULEFAMILY, "") != "":\r | |
2290 | if Family != ToolDef[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:\r | |
2291 | continue\r | |
2292 | elif Family != ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:\r | |
2293 | continue\r | |
2294 | FamilyMatch = True\r | |
2295 | # expand any wildcard\r | |
2296 | if Target == "*" or Target == self.BuildTarget:\r | |
2297 | if Tag == "*" or Tag == self.ToolChain:\r | |
2298 | if Arch == "*" or Arch == self.Arch:\r | |
2299 | if Tool not in BuildOptions:\r | |
2300 | BuildOptions[Tool] = {}\r | |
2301 | if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):\r | |
2302 | BuildOptions[Tool][Attr] = Options[Key]\r | |
2303 | else:\r | |
2304 | # append options for the same tool except PATH\r | |
2305 | if Attr != 'PATH':\r | |
2306 | BuildOptions[Tool][Attr] += " " + Options[Key]\r | |
2307 | else:\r | |
2308 | BuildOptions[Tool][Attr] = Options[Key]\r | |
2309 | # Build Option Family has been checked, which need't to be checked again for family.\r | |
2310 | if FamilyMatch or FamilyIsNull:\r | |
2311 | return BuildOptions\r | |
2312 | \r | |
2313 | for Key in Options:\r | |
2314 | if ModuleStyle is not None and len (Key) > 2:\r | |
2315 | # Check Module style is EDK or EDKII.\r | |
2316 | # Only append build option for the matched style module.\r | |
2317 | if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:\r | |
2318 | continue\r | |
2319 | elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:\r | |
2320 | continue\r | |
2321 | Family = Key[0]\r | |
2322 | Target, Tag, Arch, Tool, Attr = Key[1].split("_")\r | |
2323 | # if tool chain family doesn't match, skip it\r | |
2324 | if Tool not in ToolDef or Family == "":\r | |
2325 | continue\r | |
2326 | # option has been added before\r | |
2327 | if Family != ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:\r | |
2328 | continue\r | |
2329 | \r | |
2330 | # expand any wildcard\r | |
2331 | if Target == "*" or Target == self.BuildTarget:\r | |
2332 | if Tag == "*" or Tag == self.ToolChain:\r | |
2333 | if Arch == "*" or Arch == self.Arch:\r | |
2334 | if Tool not in BuildOptions:\r | |
2335 | BuildOptions[Tool] = {}\r | |
2336 | if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):\r | |
2337 | BuildOptions[Tool][Attr] = Options[Key]\r | |
2338 | else:\r | |
2339 | # append options for the same tool except PATH\r | |
2340 | if Attr != 'PATH':\r | |
2341 | BuildOptions[Tool][Attr] += " " + Options[Key]\r | |
2342 | else:\r | |
2343 | BuildOptions[Tool][Attr] = Options[Key]\r | |
2344 | return BuildOptions\r | |
2345 | \r | |
2346 | ## Append build options in platform to a module\r | |
2347 | #\r | |
2348 | # @param Module The module to which the build options will be appened\r | |
2349 | #\r | |
2350 | # @retval options The options appended with build options in platform\r | |
2351 | #\r | |
2352 | def ApplyBuildOption(self, Module):\r | |
2353 | # Get the different options for the different style module\r | |
2354 | if Module.AutoGenVersion < 0x00010005:\r | |
2355 | PlatformOptions = self.EdkBuildOption\r | |
2356 | ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDK_NAME, Module.ModuleType)\r | |
2357 | else:\r | |
2358 | PlatformOptions = self.EdkIIBuildOption\r | |
2359 | ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)\r | |
2360 | ModuleTypeOptions = self._ExpandBuildOption(ModuleTypeOptions)\r | |
2361 | ModuleOptions = self._ExpandBuildOption(Module.BuildOptions)\r | |
2362 | if Module in self.Platform.Modules:\r | |
2363 | PlatformModule = self.Platform.Modules[str(Module)]\r | |
2364 | PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions)\r | |
2365 | else:\r | |
2366 | PlatformModuleOptions = {}\r | |
2367 | \r | |
2368 | BuildRuleOrder = None\r | |
2369 | for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:\r | |
2370 | for Tool in Options:\r | |
2371 | for Attr in Options[Tool]:\r | |
2372 | if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:\r | |
2373 | BuildRuleOrder = Options[Tool][Attr]\r | |
2374 | \r | |
2375 | AllTools = set(ModuleOptions.keys() + PlatformOptions.keys() +\r | |
2376 | PlatformModuleOptions.keys() + ModuleTypeOptions.keys() +\r | |
2377 | self.ToolDefinition.keys())\r | |
2378 | BuildOptions = defaultdict(lambda: defaultdict(str))\r | |
2379 | for Tool in AllTools:\r | |
2380 | for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:\r | |
2381 | if Tool not in Options:\r | |
2382 | continue\r | |
2383 | for Attr in Options[Tool]:\r | |
2384 | #\r | |
2385 | # Do not generate it in Makefile\r | |
2386 | #\r | |
2387 | if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:\r | |
2388 | continue\r | |
2389 | Value = Options[Tool][Attr]\r | |
2390 | # check if override is indicated\r | |
2391 | if Value.startswith('='):\r | |
2392 | BuildOptions[Tool][Attr] = mws.handleWsMacro(Value[1:])\r | |
2393 | else:\r | |
2394 | if Attr != 'PATH':\r | |
2395 | BuildOptions[Tool][Attr] += " " + mws.handleWsMacro(Value)\r | |
2396 | else:\r | |
2397 | BuildOptions[Tool][Attr] = mws.handleWsMacro(Value)\r | |
2398 | \r | |
2399 | if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag is not None:\r | |
2400 | #\r | |
2401 | # Override UNI flag only for EDK module.\r | |
2402 | #\r | |
2403 | BuildOptions['BUILD']['FLAGS'] = self.Workspace.UniFlag\r | |
2404 | return BuildOptions, BuildRuleOrder\r | |
2405 | \r | |
#
# extend lists contained in a dictionary with lists stored in another dictionary
# if CopyToDict is not derived from DefaultDict(list) then this may raise exception
#
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    for Key, Values in CopyFromDict.items():
        CopyToDict[Key].extend(Values)
2413 | \r | |
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    FullPath = path.join(*PathList)
    CreateDirectory(FullPath)
    return FullPath
2419 | \r | |
2420 | ## ModuleAutoGen class\r | |
2421 | #\r | |
2422 | # This class encapsules the AutoGen behaviors for the build tools. In addition to\r | |
2423 | # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according\r | |
2424 | # to the [depex] section in module's inf file.\r | |
2425 | #\r | |
2426 | class ModuleAutoGen(AutoGen):\r | |
    # call super().__init__ then call the worker function with different parameter count
    def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # The _Init guard shows __init__ may run more than once on the same
        # object; initialization is performed only the first time.
        if not hasattr(self, "_Init"):
            super(ModuleAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
            # args[0] is the platform meta-file (see _InitWorker's PlatformFile)
            self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
            self._Init = True
2433 | \r | |
    ## Cache the timestamps of metafiles of every module in a class attribute
    #
    # Class-level, so it is shared by all ModuleAutoGen instances.
    TimeDict = {}
2437 | \r | |
    ## Construct a ModuleAutoGen, or return None when the module is unused
    #
    # Returning None from __new__ makes the whole constructor expression
    # evaluate to None for modules the active platform does not employ.
    #
    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # check if this module is employed by active platform
        # (args[0] is the platform meta-file; see _InitWorker's PlatformFile)
        if not PlatformAutoGen(Workspace, args[0], Target, Toolchain, Arch).ValidModule(MetaFile):
            EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
                              % (MetaFile, Arch))
            return None
        return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
2445 | \r | |
    ## Initialize ModuleAutoGen
    #
    # @param      Workspace           EdkIIWorkspaceBuild object
    # @param      ModuleFile          The path of module file
    # @param      Target              Build target (DEBUG, RELEASE)
    # @param      Toolchain           Name of tool chain
    # @param      Arch                The arch the module supports
    # @param      PlatformFile        Platform meta-file
    #
    def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)

        self.Workspace = Workspace
        self.WorkspaceDir = Workspace.WorkspaceDir
        self.MetaFile = ModuleFile
        self.PlatformInfo = PlatformAutoGen(Workspace, PlatformFile, Target, Toolchain, Arch)

        # Module source directory, made relative to the workspace root
        self.SourceDir = self.MetaFile.SubDir
        self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)

        self.SourceOverrideDir = None
        # use overridden path defined in DSC file
        if self.MetaFile.Key in GlobalData.gOverrideDir:
            self.SourceOverrideDir = GlobalData.gOverrideDir[self.MetaFile.Key]

        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily

        # Flags recording which generation steps have already run
        self.IsCodeFileCreated = False
        self.IsAsBuiltInfCreated = False
        self.DepexGenerated = False

        self.BuildDatabase = self.Workspace.BuildDatabase
        self.BuildRuleOrder = None
        self.BuildTime = 0

        # Usage comments harvested from the INF, keyed per item
        self._PcdComments = OrderedListDict()
        self._GuidComments = OrderedListDict()
        self._ProtocolComments = OrderedListDict()
        self._PpiComments = OrderedListDict()
        # Lazily-populated build-target bookkeeping (None until computed)
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None

        self.AutoGenDepSet = set()
        self.ReferenceModules = []
        self.ConstPcd = {}
2499 | \r | |
2500 | def __repr__(self):\r | |
2501 | return "%s [%s]" % (self.MetaFile, self.Arch)\r | |
2502 | \r | |
2503 | # Get FixedAtBuild Pcds of this Module\r | |
2504 | @cached_property\r | |
2505 | def FixedAtBuildPcds(self):\r | |
2506 | RetVal = []\r | |
2507 | for Pcd in self.ModulePcdList:\r | |
2508 | if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:\r | |
2509 | continue\r | |
2510 | if Pcd not in RetVal:\r | |
2511 | RetVal.append(Pcd)\r | |
2512 | return RetVal\r | |
2513 | \r | |
2514 | @cached_property\r | |
2515 | def FixedVoidTypePcds(self):\r | |
2516 | RetVal = {}\r | |
2517 | for Pcd in self.FixedAtBuildPcds:\r | |
2518 | if Pcd.DatumType == TAB_VOID:\r | |
2519 | if '{}.{}'.format(Pcd.TokenSpaceGuidCName, Pcd.TokenCName) not in RetVal:\r | |
2520 | RetVal['{}.{}'.format(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)] = Pcd.DefaultValue\r | |
2521 | return RetVal\r | |
2522 | \r | |
    ## Return a platform-unique base name for this module
    #
    # When another module in the platform shares this BaseName but has a
    # different FILE_GUID, the GUID is appended to disambiguate build output.
    # Identical BaseName AND identical FILE_GUID is a hard error.
    #
    @property
    def UniqueBaseName(self):
        BaseName = self.Name
        for Module in self.PlatformInfo.ModuleAutoGenList:
            if Module.MetaFile == self.MetaFile:
                continue
            if Module.Name == self.Name:
                if uuid.UUID(Module.Guid) == uuid.UUID(self.Guid):
                    EdkLogger.error("build", FILE_DUPLICATED, 'Modules have same BaseName and FILE_GUID:\n'
                                    '  %s\n  %s' % (Module.MetaFile, self.MetaFile))
                BaseName = '%s_%s' % (self.Name, self.Guid)
        return BaseName
2535 | \r | |
    # Macros could be used in build_rule.txt (also Makefile)
    #
    # Maps macro name -> value for substitution when expanding build rules.
    # Several names are aliases kept for compatibility (e.g. TOOLCHAIN /
    # TOOLCHAIN_TAG / TOOL_CHAIN_TAG all carry self.ToolChain).
    @cached_property
    def Macros(self):
        return OrderedDict((
            ("WORKSPACE" ,self.WorkspaceDir),
            ("MODULE_NAME" ,self.Name),
            ("MODULE_NAME_GUID" ,self.UniqueBaseName),
            ("MODULE_GUID" ,self.Guid),
            ("MODULE_VERSION" ,self.Version),
            ("MODULE_TYPE" ,self.ModuleType),
            ("MODULE_FILE" ,str(self.MetaFile)),
            ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),
            ("MODULE_RELATIVE_DIR" ,self.SourceDir),
            ("MODULE_DIR" ,self.SourceDir),
            ("BASE_NAME" ,self.Name),
            ("ARCH" ,self.Arch),
            ("TOOLCHAIN" ,self.ToolChain),
            ("TOOLCHAIN_TAG" ,self.ToolChain),
            ("TOOL_CHAIN_TAG" ,self.ToolChain),
            ("TARGET" ,self.BuildTarget),
            ("BUILD_DIR" ,self.PlatformInfo.BuildDir),
            ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("MODULE_BUILD_DIR" ,self.BuildDir),
            ("OUTPUT_DIR" ,self.OutputDir),
            ("DEBUG_DIR" ,self.DebugDir),
            ("DEST_DIR_OUTPUT" ,self.OutputDir),
            ("DEST_DIR_DEBUG" ,self.DebugDir),
            ("PLATFORM_NAME" ,self.PlatformInfo.Name),
            ("PLATFORM_GUID" ,self.PlatformInfo.Guid),
            ("PLATFORM_VERSION" ,self.PlatformInfo.Version),
            ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),
            ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
            ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),
            ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)
        ))
2572 | \r | |
    ## Return the module build data object (looked up in the workspace database)
    @cached_property
    def Module(self):
        return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]

    ## Return the module name (INF BASE_NAME)
    @cached_property
    def Name(self):
        return self.Module.BaseName

    ## Return the module DxsFile if exist
    @cached_property
    def DxsFile(self):
        return self.Module.DxsFile

    ## Return the module meta-file GUID
    @cached_property
    def Guid(self):
        #
        # To build same module more than once, the module path with FILE_GUID overridden has
        # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
        # in DSC. The overridden GUID can be retrieved from file name
        #
        if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
            #
            # Length of GUID is 36
            #
            return os.path.basename(self.MetaFile.Path)[:36]
        return self.Module.Guid

    ## Return the module version
    @cached_property
    def Version(self):
        return self.Module.Version

    ## Return the module type
    @cached_property
    def ModuleType(self):
        return self.Module.ModuleType

    ## Return the component type (for Edk.x style of module)
    @cached_property
    def ComponentType(self):
        return self.Module.ComponentType

    ## Return the build type
    @cached_property
    def BuildType(self):
        return self.Module.BuildType

    ## Return the PCD_IS_DRIVER setting
    @cached_property
    def PcdIsDriver(self):
        return self.Module.PcdIsDriver

    ## Return the autogen version, i.e. module meta-file version
    @cached_property
    def AutoGenVersion(self):
        return self.Module.AutoGenVersion

    ## Check if the module is library or not (a non-empty LibraryClass)
    @cached_property
    def IsLibrary(self):
        return bool(self.Module.LibraryClass)

    ## Check if the module is binary module or not
    @cached_property
    def IsBinaryModule(self):
        return self.Module.IsBinaryModule
2642 | \r | |
    ## Return the directory to store intermediate files of the module
    #
    # The directory is created on first access (see _MakeDir).
    @cached_property
    def BuildDir(self):
        return _MakeDir((
            self.PlatformInfo.BuildDir,
            self.Arch,
            self.SourceDir,
            self.MetaFile.BaseName
        ))

    ## Return the directory to store the intermediate object files of the module
    @cached_property
    def OutputDir(self):
        return _MakeDir((self.BuildDir, "OUTPUT"))

    ## Return the directory path to store ffs file
    #
    # Only meaningful when an FDF file is in use; empty string otherwise.
    @cached_property
    def FfsOutputDir(self):
        if GlobalData.gFdfParser:
            return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
        return ''

    ## Return the directory to store auto-gened source files of the module
    @cached_property
    def DebugDir(self):
        return _MakeDir((self.BuildDir, "DEBUG"))
2669 | \r | |
2670 | ## Return the path of custom file\r | |
2671 | @cached_property\r | |
2672 | def CustomMakefile(self):\r | |
2673 | RetVal = {}\r | |
2674 | for Type in self.Module.CustomMakefile:\r | |
2675 | MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'\r | |
2676 | if self.SourceOverrideDir is not None:\r | |
2677 | File = os.path.join(self.SourceOverrideDir, self.Module.CustomMakefile[Type])\r | |
2678 | if not os.path.exists(File):\r | |
2679 | File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r | |
2680 | else:\r | |
2681 | File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r | |
2682 | RetVal[MakeType] = File\r | |
2683 | return RetVal\r | |
2684 | \r | |
2685 | ## Return the directory of the makefile\r | |
2686 | #\r | |
2687 | # @retval string The directory string of module's makefile\r | |
2688 | #\r | |
2689 | @cached_property\r | |
2690 | def MakeFileDir(self):\r | |
2691 | return self.BuildDir\r | |
2692 | \r | |
2693 | ## Return build command string\r | |
2694 | #\r | |
2695 | # @retval string Build command string\r | |
2696 | #\r | |
2697 | @cached_property\r | |
2698 | def BuildCommand(self):\r | |
2699 | return self.PlatformInfo.BuildCommand\r | |
2700 | \r | |
2701 | ## Get object list of all packages the module and its dependent libraries belong to\r | |
2702 | #\r | |
2703 | # @retval list The list of package object\r | |
2704 | #\r | |
2705 | @cached_property\r | |
2706 | def DerivedPackageList(self):\r | |
2707 | PackageList = []\r | |
2708 | for M in [self.Module] + self.DependentLibraryList:\r | |
2709 | for Package in M.Packages:\r | |
2710 | if Package in PackageList:\r | |
2711 | continue\r | |
2712 | PackageList.append(Package)\r | |
2713 | return PackageList\r | |
2714 | \r | |
    ## Get the depex string
    #
    #  Collects every [Depex] section from this module and its dependent
    #  libraries and merges them into a single INF-style section string.
    #  For USER_DEFINED modules, each (Arch, ModuleType) DEPEX section is kept
    #  as a separate commented section; for all other module types the
    #  matching expressions are ANDed into one expression.
    #
    # @return : a string contain all depex expresion.
    def _GetDepexExpresionString(self):
        DepexStr = ''
        DepexList = []
        ## DPX_SOURCE IN Define section.
        if self.Module.DxsFile:
            # Depex comes from an external DXS file; nothing to merge here.
            return DepexStr
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            DepexExpresionList = InfObj.GetDepexExpresionList()
            for DepexExpresion in DepexExpresionList:
                for key in DepexExpresion:
                    Arch, ModuleType = key
                    # Drop comment lines from the expression body.
                    DepexExpr = [x for x in DepexExpresion[key] if not str(x).startswith('#')]
                    # the type of build module is USER_DEFINED.
                    # All different DEPEX section tags would be copied into the As Built INF file
                    # and there would be separate DEPEX section tags
                    if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED:
                        if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
                    else:
                        # Keep sections that are arch-common, or that match both the
                        # current arch and a common/current module type.
                        if Arch.upper() == TAB_ARCH_COMMON or \
                           (Arch.upper() == self.Arch.upper() and \
                           ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
                            DepexList.append({(Arch, ModuleType): DepexExpr})

        #the type of build module is USER_DEFINED.
        if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED:
            for Depex in DepexList:
                for key in Depex:
                    DepexStr += '[Depex.%s.%s]\n' % key
                    DepexStr += '\n'.join('# '+ val for val in Depex[key])
                    DepexStr += '\n\n'
            if not DepexStr:
                return '[Depex.%s]\n' % self.Arch
            return DepexStr

        #the type of build module not is USER_DEFINED.
        Count = 0
        for Depex in DepexList:
            Count += 1
            if DepexStr != '':
                DepexStr += ' AND '
            DepexStr += '('
            for D in Depex.values():
                DepexStr += ' '.join(val for val in D)
            # Strip a trailing 'END' token before closing the parenthesis.
            Index = DepexStr.find('END')
            if Index > -1 and Index == len(DepexStr) - 3:
                DepexStr = DepexStr[:-3]
            DepexStr = DepexStr.strip()
            DepexStr += ')'
        if Count == 1:
            # A single expression needs no wrapping parentheses.
            DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
        if not DepexStr:
            return '[Depex.%s]\n' % self.Arch
        return '[Depex.%s]\n# ' % self.Arch + DepexStr
2774 | \r | |
    ## Merge dependency expression
    #
    #  Builds the merged depex token list from this module and its dependent
    #  libraries.  Each contributor's expression is wrapped in '(' ')' and
    #  joined with 'AND'; FixedAtBuild VOID* PCD references are replaced by
    #  their 16-byte values.
    #
    # @retval list   The token list of the dependency expression after parsed
    #
    @cached_property
    def DepexList(self):
        # No depex for libraries, DXS-file modules, or modules carrying a
        # pre-built dependency-expression binary.
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexList = []
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.Depex[self.Arch, self.ModuleType]:
                if DepexList != []:
                    DepexList.append('AND')
                DepexList.append('(')
                #replace D with value if D is FixedAtBuild PCD
                NewList = []
                for item in D:
                    if '.' not in item:
                        NewList.append(item)
                    else:
                        # A dotted token (TokenSpace.PcdName) must be a FixedAtBuild
                        # VOID* PCD whose value is a 16-byte GUID byte list.
                        if item not in self._FixedPcdVoidTypeDict:
                            EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))
                        else:
                            Value = self._FixedPcdVoidTypeDict[item]
                            if len(Value.split(',')) != 16:
                                EdkLogger.error("build", FORMAT_INVALID,
                                                "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
                            NewList.append(Value)
                DepexList.extend(NewList)
                if DepexList[-1] == 'END': # no need of a END at this time
                    DepexList.pop()
                DepexList.append(')')
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexList))
            # BEFORE/AFTER expressions are not combined with anything else.
            if 'BEFORE' in DepexList or 'AFTER' in DepexList:
                break
        if len(DepexList) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexList}
2820 | \r | |
    ## Merge dependency expression
    #
    #  Same merge as DepexList but producing a single human-readable string:
    #  each contributor's expression is parenthesized (with a trailing 'END'
    #  stripped) and joined with ' AND '.
    #
    # @retval dict   {module type: merged depex expression string}
    #
    @cached_property
    def DepexExpressionDict(self):
        # No depex for libraries, DXS-file modules, or modules carrying a
        # pre-built dependency-expression binary.
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexExpressionString = ''
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.DepexExpression[self.Arch, self.ModuleType]:
                if DepexExpressionString != '':
                    DepexExpressionString += ' AND '
                DepexExpressionString += '('
                DepexExpressionString += D
                # Drop a trailing 'END' before closing the parenthesis.
                DepexExpressionString = DepexExpressionString.rstrip('END').strip()
                DepexExpressionString += ')'
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
            # BEFORE/AFTER expressions are not combined with anything else.
            if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
                break
        if len(DepexExpressionString) > 0:
            EdkLogger.verbose('')

        return {self.ModuleType:DepexExpressionString}
2852 | \r | |
    # Get the tiano core user extension, it is contain dependent library.
    #
    #  Scans the INF of this module and each dependent library for
    #  [UserExtensions.TianoCore...] sections whose (optional) fourth field
    #  matches the current arch or is arch-common.
    #
    # @retval: a list contain tiano core userextension.
    #
    def _GetTianoCoreUserExtensionList(self):
        TianoCoreUserExtentionList = []
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
            for TianoCoreUserExtent in TianoCoreUserExtenList:
                for Section in TianoCoreUserExtent:
                    ItemList = Section.split(TAB_SPLIT)
                    Arch = self.Arch
                    # A 4-field section name carries an explicit arch tag.
                    if len(ItemList) == 4:
                        Arch = ItemList[3]
                    if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
                        # Emit the section header line followed by its content lines.
                        TianoCoreList = []
                        TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
                        TianoCoreList.extend(TianoCoreUserExtent[Section][:])
                        TianoCoreList.append('\n')
                        TianoCoreUserExtentionList.append(TianoCoreList)

        return TianoCoreUserExtentionList
2876 | \r | |
2877 | ## Return the list of specification version required for the module\r | |
2878 | #\r | |
2879 | # @retval list The list of specification defined in module file\r | |
2880 | #\r | |
2881 | @cached_property\r | |
2882 | def Specification(self):\r | |
2883 | return self.Module.Specification\r | |
2884 | \r | |
2885 | ## Tool option for the module build\r | |
2886 | #\r | |
2887 | # @param PlatformInfo The object of PlatformBuildInfo\r | |
2888 | # @retval dict The dict containing valid options\r | |
2889 | #\r | |
2890 | @cached_property\r | |
2891 | def BuildOption(self):\r | |
2892 | RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)\r | |
2893 | if self.BuildRuleOrder:\r | |
2894 | self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]\r | |
2895 | return RetVal\r | |
2896 | \r | |
2897 | ## Get include path list from tool option for the module build\r | |
2898 | #\r | |
2899 | # @retval list The include path list\r | |
2900 | #\r | |
2901 | @cached_property\r | |
2902 | def BuildOptionIncPathList(self):\r | |
2903 | #\r | |
2904 | # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT\r | |
2905 | # is the former use /I , the Latter used -I to specify include directories\r | |
2906 | #\r | |
2907 | if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):\r | |
2908 | BuildOptIncludeRegEx = gBuildOptIncludePatternMsft\r | |
2909 | elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):\r | |
2910 | BuildOptIncludeRegEx = gBuildOptIncludePatternOther\r | |
2911 | else:\r | |
2912 | #\r | |
2913 | # New ToolChainFamily, don't known whether there is option to specify include directories\r | |
2914 | #\r | |
2915 | return []\r | |
2916 | \r | |
2917 | RetVal = []\r | |
2918 | for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):\r | |
2919 | try:\r | |
2920 | FlagOption = self.BuildOption[Tool]['FLAGS']\r | |
2921 | except KeyError:\r | |
2922 | FlagOption = ''\r | |
2923 | \r | |
2924 | if self.ToolChainFamily != 'RVCT':\r | |
2925 | IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]\r | |
2926 | else:\r | |
2927 | #\r | |
2928 | # RVCT may specify a list of directory seperated by commas\r | |
2929 | #\r | |
2930 | IncPathList = []\r | |
2931 | for Path in BuildOptIncludeRegEx.findall(FlagOption):\r | |
2932 | PathList = GetSplitList(Path, TAB_COMMA_SPLIT)\r | |
2933 | IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)\r | |
2934 | \r | |
2935 | #\r | |
2936 | # EDK II modules must not reference header files outside of the packages they depend on or\r | |
2937 | # within the module's directory tree. Report error if violation.\r | |
2938 | #\r | |
2939 | if self.AutoGenVersion >= 0x00010005:\r | |
2940 | for Path in IncPathList:\r | |
2941 | if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):\r | |
2942 | ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)\r | |
2943 | EdkLogger.error("build",\r | |
2944 | PARAMETER_INVALID,\r | |
2945 | ExtraData=ErrMsg,\r | |
2946 | File=str(self.MetaFile))\r | |
2947 | RetVal += IncPathList\r | |
2948 | return RetVal\r | |
2949 | \r | |
    ## Return a list of files which can be built from source
    #
    # What kind of files can be built is determined by build rules in
    # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
    #
    # Side effects: for EDK II modules, the directory of each matched source
    # file is prepended to self.IncludePathList, and _ApplyBuildRule() is run
    # on every returned file so its build targets get registered.
    #
    @cached_property
    def SourceFileList(self):
        RetVal = []
        # Empty tag / '*' means "any toolchain".
        ToolChainTagSet = {"", "*", self.ToolChain}
        ToolChainFamilySet = {"", "*", self.ToolChainFamily, self.BuildRuleFamily}
        for F in self.Module.Sources:
            # match tool chain
            if F.TagName not in ToolChainTagSet:
                EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
                continue
            # match tool chain family or build rule family
            if F.ToolChainFamily not in ToolChainFamilySet:
                EdkLogger.debug(
                        EdkLogger.DEBUG_0,
                        "The file [%s] must be built by tools of [%s], " \
                        "but current toolchain family is [%s], buildrule family is [%s]" \
                            % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
                continue

            # add the file path into search path list for file including
            if F.Dir not in self.IncludePathList and self.AutoGenVersion >= 0x00010005:
                self.IncludePathList.insert(0, F.Dir)
            RetVal.append(F)

        # Drop same-base-name duplicates, honoring BuildRuleOrder priority.
        self._MatchBuildRuleOrder(RetVal)

        for F in RetVal:
            self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return RetVal
2985 | \r | |
    ## Remove source files superseded by a higher-priority build-rule extension
    #
    #  When several sources share a base path but differ in extension, only
    #  the one whose extension ranks first in self.BuildRuleOrder is kept;
    #  the others are removed from FileList in place.
    #
    #   @param FileList   List of source files (modified in place)
    #   @retval list      The filtered FileList
    #
    def _MatchBuildRuleOrder(self, FileList):
        Order_Dict = {}
        # Touch the BuildOption property for its side effect of populating
        # self.BuildRuleOrder.
        self.BuildOption
        for SingleFile in FileList:
            if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
                # Group files by their path without the extension.
                key = SingleFile.Path.split(SingleFile.Ext)[0]
                if key in Order_Dict:
                    Order_Dict[key].append(SingleFile.Ext)
                else:
                    Order_Dict[key] = [SingleFile.Ext]

        RemoveList = []
        for F in Order_Dict:
            if len(Order_Dict[F]) > 1:
                # Sort by priority; keep the first, drop the rest.
                Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
                for Ext in Order_Dict[F][1:]:
                    RemoveList.append(F + Ext)

        for item in RemoveList:
            FileList.remove(item)

        return FileList
3008 | \r | |
3009 | ## Return the list of unicode files\r | |
3010 | @cached_property\r | |
3011 | def UnicodeFileList(self):\r | |
3012 | return self.FileTypes.get(TAB_UNICODE_FILE,[])\r | |
3013 | \r | |
3014 | ## Return the list of vfr files\r | |
3015 | @cached_property\r | |
3016 | def VfrFileList(self):\r | |
3017 | return self.FileTypes.get(TAB_VFR_FILE, [])\r | |
3018 | \r | |
3019 | ## Return the list of Image Definition files\r | |
3020 | @cached_property\r | |
3021 | def IdfFileList(self):\r | |
3022 | return self.FileTypes.get(TAB_IMAGE_FILE,[])\r | |
3023 | \r | |
    ## Return a list of files which can be built from binary
    #
    # "Build" binary files are just to copy them to build directory.
    #
    # @retval list The list of files which can be built later
    #
    @cached_property
    def BinaryFileList(self):
        RetVal = []
        for F in self.Module.Binaries:
            # Keep only binaries for the current build target (or target-agnostic ones).
            if F.Target not in [TAB_ARCH_COMMON, '*'] and F.Target != self.BuildTarget:
                continue
            # Append BEFORE applying the rule: _ApplyBuildRule tests membership
            # in the list passed as BinaryFileList.
            RetVal.append(F)
            self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
        return RetVal
3039 | \r | |
    ## Build-rule lookup table for this module
    #
    #  Maps both file types and source extensions to instantiated build
    #  rules.  Lookup fallback order per file type:
    #    (BuildType, BuildRuleFamily) -> (ModuleType, BuildRuleFamily)
    #    -> (BuildType, ToolChainFamily) -> (ModuleType, ToolChainFamily).
    @cached_property
    def BuildRules(self):
        RetVal = {}
        BuildRuleDatabase = self.PlatformInfo.BuildRule
        for Type in BuildRuleDatabase.FileTypeList:
            #first try getting build rule by BuildRuleFamily
            RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
            if not RuleObject:
                # build type is always module type, but ...
                if self.ModuleType != self.BuildType:
                    RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
            #second try getting build rule by ToolChainFamily
            if not RuleObject:
                RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
                if not RuleObject:
                    # build type is always module type, but ...
                    if self.ModuleType != self.BuildType:
                        RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
            if not RuleObject:
                # No rule for this file type with the current toolchain; skip it.
                continue
            RuleObject = RuleObject.Instantiate(self.Macros)
            RetVal[Type] = RuleObject
            # Also index the rule by each source extension it accepts.
            for Ext in RuleObject.SourceFileExtList:
                RetVal[Ext] = RuleObject
        return RetVal
3065 | \r | |
    ## Apply the build-rule chain to a source or binary file
    #
    #  Starting from File, repeatedly applies the matching build rule to each
    #  generated output until no rule matches (or a terminating condition is
    #  hit), recording targets in self._BuildTargets, first-stage targets in
    #  self._IntroBuildTargetList, final targets in self._FinalBuildTargetList
    #  and sources by type in self._FileTypes.
    #
    #   @param File            The file to start the rule chain from
    #   @param FileType        Type used to select the first rule; later
    #                          iterations use each rule's source file type
    #   @param BinaryFileList  Optional binary list; defaults to self.BinaryFileList
    #
    def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
        # Lazily create the target-tracking containers on first use.
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        if not BinaryFileList:
            BinaryFileList = self.BinaryFileList

        SubDirectory = os.path.join(self.OutputDir, File.SubDir)
        if not os.path.exists(SubDirectory):
            CreateDirectory(SubDirectory)
        LastTarget = None
        RuleChain = set()
        # SourceList grows as rules emit outputs; Index walks it like a queue.
        SourceList = [File]
        Index = 0
        #
        # Make sure to get build rule order value
        #
        self.BuildOption

        while Index < len(SourceList):
            Source = SourceList[Index]
            Index = Index + 1

            if Source != File:
                CreateDirectory(Source.Dir)

            if File.IsBinary and File == Source and File in BinaryFileList:
                # Skip all files that are not binary libraries
                if not self.IsLibrary:
                    continue
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            elif FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            else:
                # stop at no more rules
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            FileType = RuleObject.SourceFileType
            self._FileTypes[FileType].add(Source)

            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            Target = RuleObject.Apply(Source, self.BuildRuleOrder)
            if not Target:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break
            elif not Target.Outputs:
                # Only do build for target with outputs
                self._FinalBuildTargetList.add(Target)

            self._BuildTargets[FileType].add(Target)

            if not Source.IsBinary and Source == File:
                # First-stage target generated directly from the input file.
                self._IntroBuildTargetList.add(Target)

            # to avoid cyclic rule
            if FileType in RuleChain:
                break

            RuleChain.add(FileType)
            SourceList.extend(Target.Outputs)
            LastTarget = Target
            FileType = TAB_UNKNOWN_FILE
3141 | \r | |
    ## All build targets of the module, grouped by source file type
    #
    #  Evaluating SourceFileList and BinaryFileList runs _ApplyBuildRule()
    #  on every file, which fills self._BuildTargets as a side effect.
    @cached_property
    def Targets(self):
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        #TRICK: call SourceFileList property to apply build rule for source files
        self.SourceFileList

        #TRICK: call _GetBinaryFileList to apply build rule for binary files
        self.BinaryFileList

        return self._BuildTargets
3157 | \r | |
    ## First-stage build targets (generated directly from the module's own files)
    @cached_property
    def IntroTargetList(self):
        # Evaluating Targets populates _IntroBuildTargetList as a side effect.
        self.Targets
        return self._IntroBuildTargetList
3162 | \r | |
    ## Final (end-of-chain) build targets of the module
    @cached_property
    def CodaTargetList(self):
        # Evaluating Targets populates _FinalBuildTargetList as a side effect.
        self.Targets
        return self._FinalBuildTargetList
3167 | \r | |
    ## Mapping of file type -> set of source files of that type
    @cached_property
    def FileTypes(self):
        # Evaluating Targets populates _FileTypes as a side effect.
        self.Targets
        return self._FileTypes
3172 | \r | |
3173 | ## Get the list of package object the module depends on\r | |
3174 | #\r | |
3175 | # @retval list The package object list\r | |
3176 | #\r | |
3177 | @cached_property\r | |
3178 | def DependentPackageList(self):\r | |
3179 | return self.Module.Packages\r | |
3180 | \r | |
3181 | ## Return the list of auto-generated code file\r | |
3182 | #\r | |
3183 | # @retval list The list of auto-generated file\r | |
3184 | #\r | |
3185 | @cached_property\r | |
3186 | def AutoGenFileList(self):\r | |
3187 | AutoGenUniIdf = self.BuildType != 'UEFI_HII'\r | |
3188 | UniStringBinBuffer = BytesIO()\r | |
3189 | IdfGenBinBuffer = BytesIO()\r | |
3190 | RetVal = {}\r | |
3191 | AutoGenC = TemplateString()\r | |
3192 | AutoGenH = TemplateString()\r | |
3193 | StringH = TemplateString()\r | |
3194 | StringIdf = TemplateString()\r | |
3195 | GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)\r | |
3196 | #\r | |
3197 | # AutoGen.c is generated if there are library classes in inf, or there are object files\r | |
3198 | #\r | |
3199 | if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0\r | |
3200 | or TAB_OBJECT_FILE in self.FileTypes):\r | |
3201 | AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)\r | |
3202 | RetVal[AutoFile] = str(AutoGenC)\r | |
3203 | self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r | |
3204 | if str(AutoGenH) != "":\r | |
3205 | AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)\r | |
3206 | RetVal[AutoFile] = str(AutoGenH)\r | |
3207 | self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r | |
3208 | if str(StringH) != "":\r | |
3209 | AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)\r | |
3210 | RetVal[AutoFile] = str(StringH)\r | |
3211 | self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r | |
3212 | if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":\r | |
3213 | AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)\r | |
3214 | RetVal[AutoFile] = UniStringBinBuffer.getvalue()\r | |
3215 | AutoFile.IsBinary = True\r | |
3216 | self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r | |
3217 | if UniStringBinBuffer is not None:\r | |
3218 | UniStringBinBuffer.close()\r | |
3219 | if str(StringIdf) != "":\r | |
3220 | AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)\r | |
3221 | RetVal[AutoFile] = str(StringIdf)\r | |
3222 | self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r | |
3223 | if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":\r | |
3224 | AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)\r | |
3225 | RetVal[AutoFile] = IdfGenBinBuffer.getvalue()\r | |
3226 | AutoFile.IsBinary = True\r | |
3227 | self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r | |
3228 | if IdfGenBinBuffer is not None:\r | |
3229 | IdfGenBinBuffer.close()\r | |
3230 | return RetVal\r | |
3231 | \r | |
3232 | ## Return the list of library modules explicitly or implicityly used by this module\r | |
3233 | @cached_property\r | |
3234 | def DependentLibraryList(self):\r | |
3235 | # only merge library classes and PCD for non-library module\r | |
3236 | if self.IsLibrary:\r | |
3237 | return []\r | |
3238 | if self.AutoGenVersion < 0x00010005:\r | |
3239 | return self.PlatformInfo.ResolveLibraryReference(self.Module)\r | |
3240 | return self.PlatformInfo.ApplyLibraryInstance(self.Module)\r | |
3241 | \r | |
3242 | ## Get the list of PCDs from current module\r | |
3243 | #\r | |
3244 | # @retval list The list of PCD\r | |
3245 | #\r | |
3246 | @cached_property\r | |
3247 | def ModulePcdList(self):\r | |
3248 | # apply PCD settings from platform\r | |
3249 | RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)\r | |
3250 | ExtendCopyDictionaryLists(self._PcdComments, self.Module.PcdComments)\r | |
3251 | return RetVal\r | |
3252 | \r | |
    ## Get the list of PCDs from dependent libraries
    #
    #  PCDs already declared by the module itself, or already collected from
    #  an earlier library, are skipped so each PCD appears exactly once.
    #
    # @retval list   The list of PCD
    #
    @cached_property
    def LibraryPcdList(self):
        if self.IsLibrary:
            return []
        RetVal = []
        Pcds = set()
        # get PCDs from dependent libraries
        for Library in self.DependentLibraryList:
            PcdsInLibrary = OrderedDict()
            ExtendCopyDictionaryLists(self._PcdComments, Library.PcdComments)
            for Key in Library.Pcds:
                # skip duplicated PCDs
                if Key in self.Module.Pcds or Key in Pcds:
                    continue
                Pcds.add(Key)
                # Shallow-copy so platform settings don't mutate the library's object.
                PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
            RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))
        return RetVal
3275 | \r | |
3276 | ## Get the GUID value mapping\r | |
3277 | #\r | |
3278 | # @retval dict The mapping between GUID cname and its value\r | |
3279 | #\r | |
3280 | @cached_property\r | |
3281 | def GuidList(self):\r | |
3282 | RetVal = OrderedDict(self.Module.Guids)\r | |
3283 | for Library in self.DependentLibraryList:\r | |
3284 | RetVal.update(Library.Guids)\r | |
3285 | ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)\r | |
3286 | ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)\r | |
3287 | return RetVal\r | |
3288 | \r | |
3289 | @cached_property\r | |
3290 | def GetGuidsUsedByPcd(self):\r | |
3291 | RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())\r | |
3292 | for Library in self.DependentLibraryList:\r | |
3293 | RetVal.update(Library.GetGuidsUsedByPcd())\r | |
3294 | return RetVal\r | |
3295 | ## Get the protocol value mapping\r | |
3296 | #\r | |
3297 | # @retval dict The mapping between protocol cname and its value\r | |
3298 | #\r | |
3299 | @cached_property\r | |
3300 | def ProtocolList(self):\r | |
3301 | RetVal = OrderedDict(self.Module.Protocols)\r | |
3302 | for Library in self.DependentLibraryList:\r | |
3303 | RetVal.update(Library.Protocols)\r | |
3304 | ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)\r | |
3305 | ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)\r | |
3306 | return RetVal\r | |
3307 | \r | |
3308 | ## Get the PPI value mapping\r | |
3309 | #\r | |
3310 | # @retval dict The mapping between PPI cname and its value\r | |
3311 | #\r | |
3312 | @cached_property\r | |
3313 | def PpiList(self):\r | |
3314 | RetVal = OrderedDict(self.Module.Ppis)\r | |
3315 | for Library in self.DependentLibraryList:\r | |
3316 | RetVal.update(Library.Ppis)\r | |
3317 | ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)\r | |
3318 | ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)\r | |
3319 | return RetVal\r | |
3320 | \r | |
    ## Get the list of include search path
    #
    #  Order matters: for EDK II modules the INF directory comes first, then
    #  the DEBUG (autogen) directory, then dependent package directories and
    #  their declared include paths.  Private package includes are excluded
    #  for modules located outside the owning package.
    #
    # @retval list   The list path
    #
    @cached_property
    def IncludePathList(self):
        RetVal = []
        if self.AutoGenVersion < 0x00010005:
            for Inc in self.Module.Includes:
                if Inc not in RetVal:
                    RetVal.append(Inc)
                # for Edk modules
                Inc = path.join(Inc, self.Arch.capitalize())
                if os.path.exists(Inc) and Inc not in RetVal:
                    RetVal.append(Inc)
            # Edk module needs to put DEBUG_DIR at the end of search path and not to use SOURCE_DIR all the time
            RetVal.append(self.DebugDir)
        else:
            RetVal.append(self.MetaFile.Dir)
            RetVal.append(self.DebugDir)

        for Package in self.Module.Packages:
            PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
            if PackageDir not in RetVal:
                RetVal.append(PackageDir)
            IncludesList = Package.Includes
            if Package._PrivateIncludes:
                # Modules outside the package must not see its private includes.
                if not self.MetaFile.Path.startswith(PackageDir):
                    IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
            for Inc in IncludesList:
                if Inc not in RetVal:
                    RetVal.append(str(Inc))
        return RetVal
3354 | \r | |
3355 | @cached_property\r | |
3356 | def IncludePathLength(self):\r | |
3357 | return sum(len(inc)+1 for inc in self.IncludePathList)\r | |
3358 | \r | |
    ## Get HII EX PCDs which maybe used by VFR
    #
    #  efivarstore used by VFR may relate with HII EX PCDs
    #  Get the variable name and GUID from efivarstore and HII EX PCD
    #  List the HII EX PCDs in As Built INF if both name and GUID match.
    #
    #  @retval list HII EX PCDs
    #
    def _GetPcdsMaybeUsedByVfr(self):
        if not self.SourceFileList:
            return []

        # Collect (variable name, GUID) pairs from the preprocessed .vfr (.i) files.
        NameGuids = set()
        for SrcFile in self.SourceFileList:
            if SrcFile.Ext.lower() != '.vfr':
                continue
            Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
            if not os.path.exists(Vfri):
                continue
            VfriFile = open(Vfri, 'r')
            Content = VfriFile.read()
            VfriFile.close()
            Pos = Content.find('efivarstore')
            while Pos != -1:
                #
                # Make sure 'efivarstore' is the start of efivarstore statement
                # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
                #
                Index = Pos - 1
                while Index >= 0 and Content[Index] in ' \t\r\n':
                    Index -= 1
                if Index >= 0 and Content[Index] != ';':
                    # Not statement-initial; look for the next occurrence.
                    Pos = Content.find('efivarstore', Pos + len('efivarstore'))
                    continue
                #
                # 'efivarstore' must be followed by name and guid
                #
                Name = gEfiVarStoreNamePattern.search(Content, Pos)
                if not Name:
                    break
                Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
                if not Guid:
                    break
                NameArray = ConvertStringToByteArray('L"' + Name.group(1) + '"')
                NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
                Pos = Content.find('efivarstore', Name.end())
        if not NameGuids:
            return []
        # Match collected (name, GUID) pairs against DynamicEx HII PCDs of the platform.
        HiiExPcds = []
        for Pcd in self.PlatformInfo.Platform.Pcds.values():
            if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
                continue
            for SkuInfo in Pcd.SkuInfoList.values():
                Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
                if not Value:
                    continue
                Name = ConvertStringToByteArray(SkuInfo.VariableName)
                Guid = GuidStructureStringToGuidString(Value)
                if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
                    HiiExPcds.append(Pcd)
                    break

        return HiiExPcds
3422 | \r | |
3423 | def _GenOffsetBin(self):\r | |
3424 | VfrUniBaseName = {}\r | |
3425 | for SourceFile in self.Module.Sources:\r | |
3426 | if SourceFile.Type.upper() == ".VFR" :\r | |
3427 | #\r | |
3428 | # search the .map file to find the offset of vfr binary in the PE32+/TE file.\r | |
3429 | #\r | |
3430 | VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")\r | |
3431 | elif SourceFile.Type.upper() == ".UNI" :\r | |
3432 | #\r | |
3433 | # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.\r | |
3434 | #\r | |
3435 | VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")\r | |
3436 | \r | |
3437 | if not VfrUniBaseName:\r | |
3438 | return None\r | |
3439 | MapFileName = os.path.join(self.OutputDir, self.Name + ".map")\r | |
3440 | EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")\r | |
3441 | VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrUniBaseName.values())\r | |
3442 | if not VfrUniOffsetList:\r | |
3443 | return None\r | |
3444 | \r | |
3445 | OutputName = '%sOffset.bin' % self.Name\r | |
3446 | UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)\r | |
3447 | \r | |
3448 | try:\r | |
3449 | fInputfile = open(UniVfrOffsetFileName, "wb+", 0)\r | |
3450 | except:\r | |
3451 | EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)\r | |
3452 | \r | |
3453 | # Use a instance of BytesIO to cache data\r | |
3454 | fStringIO = BytesIO('')\r | |
3455 | \r | |
3456 | for Item in VfrUniOffsetList:\r | |
3457 | if (Item[0].find("Strings") != -1):\r | |
3458 | #\r | |
3459 | # UNI offset in image.\r | |
3460 | # GUID + Offset\r | |
3461 | # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r | |
3462 | #\r | |
3463 | UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]\r | |
3464 | UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]\r | |
3465 | fStringIO.write(''.join(UniGuid))\r | |
3466 | UniValue = pack ('Q', int (Item[1], 16))\r | |
3467 | fStringIO.write (UniValue)\r | |
3468 | else:\r | |
3469 | #\r | |
3470 | # VFR binary offset in image.\r | |
3471 | # GUID + Offset\r | |
3472 | # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r | |
3473 | #\r | |
3474 | VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]\r | |
3475 | VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]\r | |
3476 | fStringIO.write(''.join(VfrGuid))\r | |
3477 | VfrValue = pack ('Q', int (Item[1], 16))\r | |
3478 | fStringIO.write (VfrValue)\r | |
3479 | #\r | |
3480 | # write data into file.\r | |
3481 | #\r | |
3482 | try :\r | |
3483 | fInputfile.write (fStringIO.getvalue())\r | |
3484 | except:\r | |
3485 | EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "\r | |
3486 | "file been locked or using by other applications." %UniVfrOffsetFileName, None)\r | |
3487 | \r | |
3488 | fStringIO.close ()\r | |
3489 | fInputfile.close ()\r | |
3490 | return OutputName\r | |
3491 | \r | |
3492 | ## Create AsBuilt INF file the module\r | |
3493 | #\r | |
3494 | def CreateAsBuiltInf(self, IsOnlyCopy = False):\r | |
3495 | self.OutputFile = set()\r | |
3496 | if IsOnlyCopy and GlobalData.gBinCacheDest:\r | |
3497 | self.CopyModuleToCache()\r | |
3498 | return\r | |
3499 | \r | |
3500 | if self.IsAsBuiltInfCreated:\r | |
3501 | return\r | |
3502 | \r | |
3503 | # Skip the following code for EDK I inf\r | |
3504 | if self.AutoGenVersion < 0x00010005:\r | |
3505 | return\r | |
3506 | \r | |
3507 | # Skip the following code for libraries\r | |
3508 | if self.IsLibrary:\r | |
3509 | return\r | |
3510 | \r | |
3511 | # Skip the following code for modules with no source files\r | |
3512 | if not self.SourceFileList:\r | |
3513 | return\r | |
3514 | \r | |
3515 | # Skip the following code for modules without any binary files\r | |
3516 | if self.BinaryFileList:\r | |
3517 | return\r | |
3518 | \r | |
3519 | ### TODO: How to handles mixed source and binary modules\r | |
3520 | \r | |
3521 | # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries\r | |
3522 | # Also find all packages that the DynamicEx PCDs depend on\r | |
3523 | Pcds = []\r | |
3524 | PatchablePcds = []\r | |
3525 | Packages = []\r | |
3526 | PcdCheckList = []\r | |
3527 | PcdTokenSpaceList = []\r | |
3528 | for Pcd in self.ModulePcdList + self.LibraryPcdList:\r | |
3529 | if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:\r | |
3530 | PatchablePcds.append(Pcd)\r | |
3531 | PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))\r | |
3532 | elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
3533 | if Pcd not in Pcds:\r | |
3534 | Pcds.append(Pcd)\r | |
3535 | PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))\r | |
3536 | PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))\r | |
3537 | PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)\r | |
3538 | GuidList = OrderedDict(self.GuidList)\r | |
3539 | for TokenSpace in self.GetGuidsUsedByPcd:\r | |
3540 | # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list\r | |
3541 | # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs\r | |
3542 | if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:\r | |
3543 | GuidList.pop(TokenSpace)\r | |
3544 | CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)\r | |
3545 | for Package in self.DerivedPackageList:\r | |
3546 | if Package in Packages:\r | |
3547 | continue\r | |
3548 | BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)\r | |
3549 | Found = False\r | |
3550 | for Index in range(len(BeChecked)):\r | |
3551 | for Item in CheckList[Index]:\r | |
3552 | if Item in BeChecked[Index]:\r | |
3553 | Packages.append(Package)\r | |
3554 | Found = True\r | |
3555 | break\r | |
3556 | if Found:\r | |
3557 | break\r | |
3558 | \r | |
3559 | VfrPcds = self._GetPcdsMaybeUsedByVfr()\r | |
3560 | for Pkg in self.PlatformInfo.PackageList:\r | |
3561 | if Pkg in Packages:\r | |
3562 | continue\r | |
3563 | for VfrPcd in VfrPcds:\r | |
3564 | if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or\r | |
3565 | (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):\r | |
3566 | Packages.append(Pkg)\r | |
3567 | break\r | |
3568 | \r | |
3569 | ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType\r | |
3570 | DriverType = self.PcdIsDriver if self.PcdIsDriver else ''\r | |
3571 | Guid = self.Guid\r | |
3572 | MDefs = self.Module.Defines\r | |
3573 | \r | |
3574 | AsBuiltInfDict = {\r | |
3575 | 'module_name' : self.Name,\r | |
3576 | 'module_guid' : Guid,\r | |
3577 | 'module_module_type' : ModuleType,\r | |
3578 | 'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],\r | |
3579 | 'pcd_is_driver_string' : [],\r | |
3580 | 'module_uefi_specification_version' : [],\r | |
3581 | 'module_pi_specification_version' : [],\r | |
3582 | 'module_entry_point' : self.Module.ModuleEntryPointList,\r | |
3583 | 'module_unload_image' : self.Module.ModuleUnloadImageList,\r | |
3584 | 'module_constructor' : self.Module.ConstructorList,\r | |
3585 | 'module_destructor' : self.Module.DestructorList,\r | |
3586 | 'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],\r | |
3587 | 'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],\r | |
3588 | 'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],\r | |
3589 | 'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],\r | |
3590 | 'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],\r | |
3591 | 'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],\r | |
3592 | 'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],\r | |
3593 | 'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],\r | |
3594 | 'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],\r | |
3595 | 'module_arch' : self.Arch,\r | |
3596 | 'package_item' : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],\r | |
3597 | 'binary_item' : [],\r | |
3598 | 'patchablepcd_item' : [],\r | |
3599 | 'pcd_item' : [],\r | |
3600 | 'protocol_item' : [],\r | |
3601 | 'ppi_item' : [],\r | |
3602 | 'guid_item' : [],\r | |
3603 | 'flags_item' : [],\r | |
3604 | 'libraryclasses_item' : []\r | |
3605 | }\r | |
3606 | \r | |
3607 | if 'MODULE_UNI_FILE' in MDefs:\r | |
3608 | UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])\r | |
3609 | if os.path.isfile(UNIFile):\r | |
3610 | shutil.copy2(UNIFile, self.OutputDir)\r | |
3611 | \r | |
3612 | if self.AutoGenVersion > int(gInfSpecVersion, 0):\r | |
3613 | AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion\r | |
3614 | else:\r | |
3615 | AsBuiltInfDict['module_inf_version'] = gInfSpecVersion\r | |
3616 | \r | |
3617 | if DriverType:\r | |
3618 | AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)\r | |
3619 | \r | |
3620 | if 'UEFI_SPECIFICATION_VERSION' in self.Specification:\r | |
3621 | AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])\r | |
3622 | if 'PI_SPECIFICATION_VERSION' in self.Specification:\r | |
3623 | AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])\r | |
3624 | \r | |
3625 | OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r | |
3626 | DebugDir = self.DebugDir.replace('\\', '/').strip('/')\r | |
3627 | for Item in self.CodaTargetList:\r | |
3628 | File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')\r | |
3629 | self.OutputFile.add(File)\r | |
3630 | if os.path.isabs(File):\r | |
3631 | File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')\r | |
3632 | if Item.Target.Ext.lower() == '.aml':\r | |
3633 | AsBuiltInfDict['binary_item'].append('ASL|' + File)\r | |
3634 | elif Item.Target.Ext.lower() == '.acpi':\r | |
3635 | AsBuiltInfDict['binary_item'].append('ACPI|' + File)\r | |
3636 | elif Item.Target.Ext.lower() == '.efi':\r | |
3637 | AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')\r | |
3638 | else:\r | |
3639 | AsBuiltInfDict['binary_item'].append('BIN|' + File)\r | |
3640 | if self.DepexGenerated:\r | |
3641 | self.OutputFile.add(self.Name + '.depex')\r | |
3642 | if self.ModuleType in [SUP_MODULE_PEIM]:\r | |
3643 | AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')\r | |
3644 | elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:\r | |
3645 | AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')\r | |
3646 | elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:\r | |
3647 | AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')\r | |
3648 | \r | |
3649 | Bin = self._GenOffsetBin()\r | |
3650 | if Bin:\r | |
3651 | AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)\r | |
3652 | self.OutputFile.add(Bin)\r | |
3653 | \r | |
3654 | for Root, Dirs, Files in os.walk(OutputDir):\r | |
3655 | for File in Files:\r | |
3656 | if File.lower().endswith('.pdb'):\r | |
3657 | AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)\r | |
3658 | self.OutputFile.add(File)\r | |
3659 | HeaderComments = self.Module.HeaderComments\r | |
3660 | StartPos = 0\r | |
3661 | for Index in range(len(HeaderComments)):\r | |
3662 | if HeaderComments[Index].find('@BinaryHeader') != -1:\r | |
3663 | HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')\r | |
3664 | StartPos = Index\r | |
3665 | break\r | |
3666 | AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')\r | |
3667 | AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)\r | |
3668 | \r | |
3669 | GenList = [\r | |
3670 | (self.ProtocolList, self._ProtocolComments, 'protocol_item'),\r | |
3671 | (self.PpiList, self._PpiComments, 'ppi_item'),\r | |
3672 | (GuidList, self._GuidComments, 'guid_item')\r | |
3673 | ]\r | |
3674 | for Item in GenList:\r | |
3675 | for CName in Item[0]:\r | |
3676 | Comments = '\n '.join(Item[1][CName]) if CName in Item[1] else ''\r | |
3677 | Entry = Comments + '\n ' + CName if Comments else CName\r | |
3678 | AsBuiltInfDict[Item[2]].append(Entry)\r | |
3679 | PatchList = parsePcdInfoFromMapFile(\r | |
3680 | os.path.join(self.OutputDir, self.Name + '.map'),\r | |
3681 | os.path.join(self.OutputDir, self.Name + '.efi')\r | |
3682 | )\r | |
3683 | if PatchList:\r | |
3684 | for Pcd in PatchablePcds:\r | |
3685 | TokenCName = Pcd.TokenCName\r | |
3686 | for PcdItem in GlobalData.MixedPcd:\r | |
3687 | if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:\r | |
3688 | TokenCName = PcdItem[0]\r | |
3689 | break\r | |
3690 | for PatchPcd in PatchList:\r | |
3691 | if TokenCName == PatchPcd[0]:\r | |
3692 | break\r | |
3693 | else:\r | |
3694 | continue\r | |
3695 | PcdValue = ''\r | |
3696 | if Pcd.DatumType == 'BOOLEAN':\r | |
3697 | BoolValue = Pcd.DefaultValue.upper()\r | |
3698 | if BoolValue == 'TRUE':\r | |
3699 | Pcd.DefaultValue = '1'\r | |
3700 | elif BoolValue == 'FALSE':\r | |
3701 | Pcd.DefaultValue = '0'\r | |
3702 | \r | |
3703 | if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:\r | |
3704 | HexFormat = '0x%02x'\r | |
3705 | if Pcd.DatumType == TAB_UINT16:\r | |
3706 | HexFormat = '0x%04x'\r | |
3707 | elif Pcd.DatumType == TAB_UINT32:\r | |
3708 | HexFormat = '0x%08x'\r | |
3709 | elif Pcd.DatumType == TAB_UINT64:\r | |
3710 | HexFormat = '0x%016x'\r | |
3711 | PcdValue = HexFormat % int(Pcd.DefaultValue, 0)\r | |
3712 | else:\r | |
3713 | if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':\r | |
3714 | EdkLogger.error("build", AUTOGEN_ERROR,\r | |
3715 | "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)\r | |
3716 | )\r | |
3717 | ArraySize = int(Pcd.MaxDatumSize, 0)\r | |
3718 | PcdValue = Pcd.DefaultValue\r | |
3719 | if PcdValue[0] != '{':\r | |
3720 | Unicode = False\r | |
3721 | if PcdValue[0] == 'L':\r | |
3722 | Unicode = True\r | |
3723 | PcdValue = PcdValue.lstrip('L')\r | |
3724 | PcdValue = eval(PcdValue)\r | |
3725 | NewValue = '{'\r | |
3726 | for Index in range(0, len(PcdValue)):\r | |
3727 | if Unicode:\r | |
3728 | CharVal = ord(PcdValue[Index])\r | |
3729 | NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \\r | |
3730 | + '0x%02x' % (CharVal >> 8) + ', '\r | |
3731 | else:\r | |
3732 | NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '\r | |
3733 | Padding = '0x00, '\r | |
3734 | if Unicode:\r | |
3735 | Padding = Padding * 2\r | |
3736 | ArraySize = ArraySize / 2\r | |
3737 | if ArraySize < (len(PcdValue) + 1):\r | |
3738 | if Pcd.MaxSizeUserSet:\r | |
3739 | EdkLogger.error("build", AUTOGEN_ERROR,\r | |
3740 | "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)\r | |
3741 | )\r | |
3742 | else:\r | |
3743 | ArraySize = len(PcdValue) + 1\r | |
3744 | if ArraySize > len(PcdValue) + 1:\r | |
3745 | NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)\r | |
3746 | PcdValue = NewValue + Padding.strip().rstrip(',') + '}'\r | |
3747 | elif len(PcdValue.split(',')) <= ArraySize:\r | |
3748 | PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))\r | |
3749 | PcdValue += '}'\r | |
3750 | else:\r | |
3751 | if Pcd.MaxSizeUserSet:\r | |
3752 | EdkLogger.error("build", AUTOGEN_ERROR,\r | |
3753 | "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)\r | |
3754 | )\r | |
3755 | else:\r | |
3756 | ArraySize = len(PcdValue) + 1\r | |
3757 | PcdItem = '%s.%s|%s|0x%X' % \\r | |
3758 | (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])\r | |
3759 | PcdComments = ''\r | |
3760 | if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:\r | |
3761 | PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])\r | |
3762 | if PcdComments:\r | |
3763 | PcdItem = PcdComments + '\n ' + PcdItem\r | |
3764 | AsBuiltInfDict['patchablepcd_item'].append(PcdItem)\r | |
3765 | \r | |
3766 | for Pcd in Pcds + VfrPcds:\r | |
3767 | PcdCommentList = []\r | |
3768 | HiiInfo = ''\r | |
3769 | TokenCName = Pcd.TokenCName\r | |
3770 | for PcdItem in GlobalData.MixedPcd:\r | |
3771 | if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:\r | |
3772 | TokenCName = PcdItem[0]\r | |
3773 | break\r | |
3774 | if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:\r | |
3775 | for SkuName in Pcd.SkuInfoList:\r | |
3776 | SkuInfo = Pcd.SkuInfoList[SkuName]\r | |
3777 | HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)\r | |
3778 | break\r | |
3779 | if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:\r | |
3780 | PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]\r | |
3781 | if HiiInfo:\r | |
3782 | UsageIndex = -1\r | |
3783 | UsageStr = ''\r | |
3784 | for Index, Comment in enumerate(PcdCommentList):\r | |
3785 | for Usage in UsageList:\r | |
3786 | if Comment.find(Usage) != -1:\r | |
3787 | UsageStr = Usage\r | |
3788 | UsageIndex = Index\r | |
3789 | break\r | |
3790 | if UsageIndex != -1:\r | |
3791 | PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))\r | |
3792 | else:\r | |
3793 | PcdCommentList.append('## UNDEFINED ' + HiiInfo)\r | |
3794 | PcdComments = '\n '.join(PcdCommentList)\r | |
3795 | PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName\r | |
3796 | if PcdComments:\r | |
3797 | PcdEntry = PcdComments + '\n ' + PcdEntry\r | |
3798 | AsBuiltInfDict['pcd_item'].append(PcdEntry)\r | |
3799 | for Item in self.BuildOption:\r | |
3800 | if 'FLAGS' in self.BuildOption[Item]:\r | |
3801 | AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))\r | |
3802 | \r | |
3803 | # Generated LibraryClasses section in comments.\r | |
3804 | for Library in self.LibraryAutoGenList:\r | |
3805 | AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))\r | |
3806 | \r | |
3807 | # Generated UserExtensions TianoCore section.\r | |
3808 | # All tianocore user extensions are copied.\r | |
3809 | UserExtStr = ''\r | |
3810 | for TianoCore in self._GetTianoCoreUserExtensionList():\r | |
3811 | UserExtStr += '\n'.join(TianoCore)\r | |
3812 | ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])\r | |
3813 | if os.path.isfile(ExtensionFile):\r | |
3814 | shutil.copy2(ExtensionFile, self.OutputDir)\r | |
3815 | AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr\r | |
3816 | \r | |
3817 | # Generated depex expression section in comments.\r | |
3818 | DepexExpresion = self._GetDepexExpresionString()\r | |
3819 | AsBuiltInfDict['depexsection_item'] = DepexExpresion if DepexExpresion else ''\r | |
3820 | \r | |
3821 | AsBuiltInf = TemplateString()\r | |
3822 | AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))\r | |
3823 | \r | |
3824 | SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)\r | |
3825 | \r | |
3826 | self.IsAsBuiltInfCreated = True\r | |
3827 | if GlobalData.gBinCacheDest:\r | |
3828 | self.CopyModuleToCache()\r | |
3829 | \r | |
3830 | def CopyModuleToCache(self):\r | |
3831 | FileDir = path.join(GlobalData.gBinCacheDest, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r | |
3832 | CreateDirectory (FileDir)\r | |
3833 | HashFile = path.join(self.BuildDir, self.Name + '.hash')\r | |
3834 | ModuleFile = path.join(self.OutputDir, self.Name + '.inf')\r | |
3835 | if os.path.exists(HashFile):\r | |
3836 | shutil.copy2(HashFile, FileDir)\r | |
3837 | if os.path.exists(ModuleFile):\r | |
3838 | shutil.copy2(ModuleFile, FileDir)\r | |
3839 | if not self.OutputFile:\r | |
3840 | Ma = self.BuildDatabase[PathClass(ModuleFile), self.Arch, self.BuildTarget, self.ToolChain]\r | |
3841 | self.OutputFile = Ma.Binaries\r | |
3842 | if self.OutputFile:\r | |
3843 | for File in self.OutputFile:\r | |
3844 | File = str(File)\r | |
3845 | if not os.path.isabs(File):\r | |
3846 | File = os.path.join(self.OutputDir, File)\r | |
3847 | if os.path.exists(File):\r | |
3848 | shutil.copy2(File, FileDir)\r | |
3849 | \r | |
3850 | def AttemptModuleCacheCopy(self):\r | |
3851 | if self.IsBinaryModule:\r | |
3852 | return False\r | |
3853 | FileDir = path.join(GlobalData.gBinCacheSource, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r | |
3854 | HashFile = path.join(FileDir, self.Name + '.hash')\r | |
3855 | if os.path.exists(HashFile):\r | |
3856 | f = open(HashFile, 'r')\r | |
3857 | CacheHash = f.read()\r | |
3858 | f.close()\r | |
3859 | if GlobalData.gModuleHash[self.Arch][self.Name]:\r | |
3860 | if CacheHash == GlobalData.gModuleHash[self.Arch][self.Name]:\r | |
3861 | for root, dir, files in os.walk(FileDir):\r | |
3862 | for f in files:\r | |
3863 | if self.Name + '.hash' in f:\r | |
3864 | shutil.copy2(HashFile, self.BuildDir)\r | |
3865 | else:\r | |
3866 | File = path.join(root, f)\r | |
3867 | shutil.copy2(File, self.OutputDir)\r | |
3868 | if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r | |
3869 | CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r | |
3870 | return True\r | |
3871 | return False\r | |
3872 | \r | |
3873 | ## Create makefile for the module and its dependent libraries\r | |
3874 | #\r | |
3875 | # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of\r | |
3876 | # dependent libraries will be created\r | |
3877 | #\r | |
3878 | @cached_class_function\r | |
3879 | def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r | |
3880 | # nest this function inside it's only caller.\r | |
3881 | def CreateTimeStamp():\r | |
3882 | FileSet = {self.MetaFile.Path}\r | |
3883 | \r | |
3884 | for SourceFile in self.Module.Sources:\r | |
3885 | FileSet.add (SourceFile.Path)\r | |
3886 | \r | |
3887 | for Lib in self.DependentLibraryList:\r | |
3888 | FileSet.add (Lib.MetaFile.Path)\r | |
3889 | \r | |
3890 | for f in self.AutoGenDepSet:\r | |
3891 | FileSet.add (f.Path)\r | |
3892 | \r | |
3893 | if os.path.exists (self.TimeStampPath):\r | |
3894 | os.remove (self.TimeStampPath)\r | |
3895 | with open(self.TimeStampPath, 'w+') as file:\r | |
3896 | for f in FileSet:\r | |
3897 | print(f, file=file)\r | |
3898 | \r | |
3899 | # Ignore generating makefile when it is a binary module\r | |
3900 | if self.IsBinaryModule:\r | |
3901 | return\r | |
3902 | \r | |
3903 | self.GenFfsList = GenFfsList\r | |
3904 | if not self.IsLibrary and CreateLibraryMakeFile:\r | |
3905 | for LibraryAutoGen in self.LibraryAutoGenList:\r | |
3906 | LibraryAutoGen.CreateMakeFile()\r | |
3907 | \r | |
3908 | if self.CanSkip():\r | |
3909 | return\r | |
3910 | \r | |
3911 | if len(self.CustomMakefile) == 0:\r | |
3912 | Makefile = GenMake.ModuleMakefile(self)\r | |
3913 | else:\r | |
3914 | Makefile = GenMake.CustomMakefile(self)\r | |
3915 | if Makefile.Generate():\r | |
3916 | EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %\r | |
3917 | (self.Name, self.Arch))\r | |
3918 | else:\r | |
3919 | EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %\r | |
3920 | (self.Name, self.Arch))\r | |
3921 | \r | |
3922 | CreateTimeStamp()\r | |
3923 | \r | |
3924 | def CopyBinaryFiles(self):\r | |
3925 | for File in self.Module.Binaries:\r | |
3926 | SrcPath = File.Path\r | |
3927 | DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))\r | |
3928 | CopyLongFilePath(SrcPath, DstPath)\r | |
3929 | ## Create autogen code for the module and its dependent libraries\r | |
3930 | #\r | |
3931 | # @param CreateLibraryCodeFile Flag indicating if or not the code of\r | |
3932 | # dependent libraries will be created\r | |
3933 | #\r | |
3934 | def CreateCodeFile(self, CreateLibraryCodeFile=True):\r | |
3935 | if self.IsCodeFileCreated:\r | |
3936 | return\r | |
3937 | \r | |
3938 | # Need to generate PcdDatabase even PcdDriver is binarymodule\r | |
3939 | if self.IsBinaryModule and self.PcdIsDriver != '':\r | |
3940 | CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r | |
3941 | return\r | |
3942 | if self.IsBinaryModule:\r | |
3943 | if self.IsLibrary:\r | |
3944 | self.CopyBinaryFiles()\r | |
3945 | return\r | |
3946 | \r | |
3947 | if not self.IsLibrary and CreateLibraryCodeFile:\r | |
3948 | for LibraryAutoGen in self.LibraryAutoGenList:\r | |
3949 | LibraryAutoGen.CreateCodeFile()\r | |
3950 | \r | |
3951 | if self.CanSkip():\r | |
3952 | return\r | |
3953 | \r | |
3954 | AutoGenList = []\r | |
3955 | IgoredAutoGenList = []\r | |
3956 | \r | |
3957 | for File in self.AutoGenFileList:\r | |
3958 | if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):\r | |
3959 | #Ignore Edk AutoGen.c\r | |
3960 | if self.AutoGenVersion < 0x00010005 and File.Name == 'AutoGen.c':\r | |
3961 | continue\r | |
3962 | \r | |
3963 | AutoGenList.append(str(File))\r | |
3964 | else:\r | |
3965 | IgoredAutoGenList.append(str(File))\r | |
3966 | \r | |
3967 | # Skip the following code for EDK I inf\r | |
3968 | if self.AutoGenVersion < 0x00010005:\r | |
3969 | return\r | |
3970 | \r | |
3971 | for ModuleType in self.DepexList:\r | |
3972 | # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module\r | |
3973 | if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED:\r | |
3974 | continue\r | |
3975 | \r | |
3976 | Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)\r | |
3977 | DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}\r | |
3978 | \r | |
3979 | if len(Dpx.PostfixNotation) != 0:\r | |
3980 | self.DepexGenerated = True\r | |
3981 | \r | |
3982 | if Dpx.Generate(path.join(self.OutputDir, DpxFile)):\r | |
3983 | AutoGenList.append(str(DpxFile))\r | |
3984 | else:\r | |
3985 | IgoredAutoGenList.append(str(DpxFile))\r | |
3986 | \r | |
3987 | if IgoredAutoGenList == []:\r | |
3988 | EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %\r | |
3989 | (" ".join(AutoGenList), self.Name, self.Arch))\r | |
3990 | elif AutoGenList == []:\r | |
3991 | EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %\r | |
3992 | (" ".join(IgoredAutoGenList), self.Name, self.Arch))\r | |
3993 | else:\r | |
3994 | EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %\r | |
3995 | (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))\r | |
3996 | \r | |
3997 | self.IsCodeFileCreated = True\r | |
3998 | return AutoGenList\r | |
3999 | \r | |
4000 | ## Summarize the ModuleAutoGen objects of all libraries used by this module\r | |
4001 | @cached_property\r | |
4002 | def LibraryAutoGenList(self):\r | |
4003 | RetVal = []\r | |
4004 | for Library in self.DependentLibraryList:\r | |
4005 | La = ModuleAutoGen(\r | |
4006 | self.Workspace,\r | |
4007 | Library.MetaFile,\r | |
4008 | self.BuildTarget,\r | |
4009 | self.ToolChain,\r | |
4010 | self.Arch,\r | |
4011 | self.PlatformInfo.MetaFile\r | |
4012 | )\r | |
4013 | if La not in RetVal:\r | |
4014 | RetVal.append(La)\r | |
4015 | for Lib in La.CodaTargetList:\r | |
4016 | self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)\r | |
4017 | return RetVal\r | |
4018 | \r | |
4019 | def GenModuleHash(self):\r | |
4020 | if self.Arch not in GlobalData.gModuleHash:\r | |
4021 | GlobalData.gModuleHash[self.Arch] = {}\r | |
4022 | m = hashlib.md5()\r | |
4023 | # Add Platform level hash\r | |
4024 | m.update(GlobalData.gPlatformHash)\r | |
4025 | # Add Package level hash\r | |
4026 | if self.DependentPackageList:\r | |
4027 | for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r | |
4028 | if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:\r | |
4029 | m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName])\r | |
4030 | \r | |
4031 | # Add Library hash\r | |
4032 | if self.LibraryAutoGenList:\r | |
4033 | for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r | |
4034 | if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r | |
4035 | Lib.GenModuleHash()\r | |
4036 | m.update(GlobalData.gModuleHash[self.Arch][Lib.Name])\r | |
4037 | \r | |
4038 | # Add Module self\r | |
4039 | f = open(str(self.MetaFile), 'r')\r | |
4040 | Content = f.read()\r | |
4041 | f.close()\r | |
4042 | m.update(Content)\r | |
4043 | # Add Module's source files\r | |
4044 | if self.SourceFileList:\r | |
4045 | for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r | |
4046 | f = open(str(File), 'r')\r | |
4047 | Content = f.read()\r | |
4048 | f.close()\r | |
4049 | m.update(Content)\r | |
4050 | \r | |
4051 | ModuleHashFile = path.join(self.BuildDir, self.Name + ".hash")\r | |
4052 | if self.Name not in GlobalData.gModuleHash[self.Arch]:\r | |
4053 | GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()\r | |
4054 | if GlobalData.gBinCacheSource:\r | |
4055 | if self.AttemptModuleCacheCopy():\r | |
4056 | return False\r | |
4057 | return SaveFileOnChange(ModuleHashFile, m.hexdigest(), True)\r | |
4058 | \r | |
4059 | ## Decide whether we can skip the ModuleAutoGen process\r | |
4060 | def CanSkipbyHash(self):\r | |
4061 | if GlobalData.gUseHashCache:\r | |
4062 | return not self.GenModuleHash()\r | |
4063 | return False\r | |
4064 | \r | |
4065 | ## Decide whether we can skip the ModuleAutoGen process\r | |
4066 | # If any source file is newer than the module than we cannot skip\r | |
4067 | #\r | |
4068 | def CanSkip(self):\r | |
4069 | if self.MakeFileDir in GlobalData.gSikpAutoGenCache:\r | |
4070 | return True\r | |
4071 | if not os.path.exists(self.TimeStampPath):\r | |
4072 | return False\r | |
4073 | #last creation time of the module\r | |
4074 | DstTimeStamp = os.stat(self.TimeStampPath)[8]\r | |
4075 | \r | |
4076 | SrcTimeStamp = self.Workspace._SrcTimeStamp\r | |
4077 | if SrcTimeStamp > DstTimeStamp:\r | |
4078 | return False\r | |
4079 | \r | |
4080 | with open(self.TimeStampPath,'r') as f:\r | |
4081 | for source in f:\r | |
4082 | source = source.rstrip('\n')\r | |
4083 | if not os.path.exists(source):\r | |
4084 | return False\r | |
4085 | if source not in ModuleAutoGen.TimeDict :\r | |
4086 | ModuleAutoGen.TimeDict[source] = os.stat(source)[8]\r | |
4087 | if ModuleAutoGen.TimeDict[source] > DstTimeStamp:\r | |
4088 | return False\r | |
4089 | GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)\r | |
4090 | return True\r | |
4091 | \r | |
4092 | @cached_property\r | |
4093 | def TimeStampPath(self):\r | |
4094 | return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')\r |