]>
Commit | Line | Data |
---|---|---|
1 | ## @file\r | |
2 | # Generate AutoGen.h, AutoGen.c and *.depex files\r | |
3 | #\r | |
4 | # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>\r | |
5 | # Copyright (c) 2018, Hewlett Packard Enterprise Development, L.P.<BR>\r | |
6 | #\r | |
7 | # This program and the accompanying materials\r | |
8 | # are licensed and made available under the terms and conditions of the BSD License\r | |
9 | # which accompanies this distribution. The full text of the license may be found at\r | |
10 | # http://opensource.org/licenses/bsd-license.php\r | |
11 | #\r | |
12 | # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r | |
13 | # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r | |
14 | #\r | |
15 | \r | |
16 | ## Import Modules\r | |
17 | #\r | |
18 | from __future__ import print_function\r | |
19 | from __future__ import absolute_import\r | |
20 | import Common.LongFilePathOs as os\r | |
21 | import re\r | |
22 | import os.path as path\r | |
23 | import copy\r | |
24 | import uuid\r | |
25 | \r | |
26 | from . import GenC\r | |
27 | from . import GenMake\r | |
28 | from . import GenDepex\r | |
29 | from io import BytesIO\r | |
30 | \r | |
31 | from .StrGather import *\r | |
32 | from .BuildEngine import BuildRule\r | |
33 | \r | |
34 | from Common.LongFilePathSupport import CopyLongFilePath\r | |
35 | from Common.BuildToolError import *\r | |
36 | from Common.DataType import *\r | |
37 | from Common.Misc import *\r | |
38 | from Common.StringUtils import *\r | |
39 | import Common.GlobalData as GlobalData\r | |
40 | from GenFds.FdfParser import *\r | |
41 | from CommonDataClass.CommonClass import SkuInfoClass\r | |
42 | from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile\r | |
43 | import Common.VpdInfoFile as VpdInfoFile\r | |
44 | from .GenPcdDb import CreatePcdDatabaseCode\r | |
45 | from Workspace.MetaFileCommentParser import UsageList\r | |
46 | from Workspace.WorkspaceCommon import GetModuleLibInstances\r | |
47 | from Common.MultipleWorkspace import MultipleWorkspace as mws\r | |
48 | from . import InfSectionParser\r | |
49 | import datetime\r | |
50 | import hashlib\r | |
51 | from .GenVar import VariableMgr, var_info\r | |
52 | from collections import OrderedDict\r | |
53 | from collections import defaultdict\r | |
54 | from Workspace.WorkspaceCommon import OrderedListDict\r | |
55 | \r | |
56 | from Common.caching import cached_property, cached_class_function\r | |
57 | \r | |
## Regular expression for splitting a Dependency Expression string into tokens
#  (parentheses, bare words, or " <name>.inf" module references).
#  NOTE: patterns are raw strings — the previous non-raw literals relied on
#  Python passing unknown escapes ("\(", "\w", "\S") through unchanged, which
#  raises DeprecationWarning/SyntaxWarning on Python 3.6+.
gDepexTokenPattern = re.compile(r"(\(|\)|\w+| \S+\.inf)")

## Regular expression for match: PCD(xxxx.yyy)
gPCDAsGuidPattern = re.compile(r"^PCD\(.+\..+\)$")

#
# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
# is the former use /I , the Latter used -I to specify include directories
#
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

#
# Match name = variable
#
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
#
# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
#
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
80 | \r | |
## Mapping Makefile type: compiler family name -> make tool used to drive it
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}


## Build rule configuration file (looked up under the Conf directory)
gDefaultBuildRuleFile = 'build_rule.txt'

## Tools definition configuration file (looked up under the Conf directory)
gDefaultToolsDefFile = 'tools_def.txt'

## Build rule default version
AutoGenReqBuildRuleVerNum = "0.1"

## default file name for AutoGen output files;
#  "%(module_name)s" placeholders are filled in per module via %-interpolation
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
# INF_VERSION written into generated as-built INFs
gInfSpecVersion = "0x00010017"
103 | \r | |
#
# Template string used to generate the "as built" INF for a module.
# ${name} placeholders are substituted by TemplateString, and ${BEGIN}/${END}
# pairs delimit repeatable regions.  The text between the triple quotes is
# emitted verbatim into the generated INF, so it must not be reformatted.
#
gAsBuiltInfHeaderString = TemplateString("""${header_comments}

# DO NOT EDIT
# FILE auto-generated

[Defines]
INF_VERSION = ${module_inf_version}
BASE_NAME = ${module_name}
FILE_GUID = ${module_guid}
MODULE_TYPE = ${module_module_type}${BEGIN}
VERSION_STRING = ${module_version_string}${END}${BEGIN}
PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
DESTRUCTOR = ${module_destructor}${END}${BEGIN}
SHADOW = ${module_shadow}${END}${BEGIN}
PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
SPEC = ${module_spec}${END}${BEGIN}
UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
MODULE_UNI_FILE = ${module_uni_file}${END}

[Packages.${module_arch}]${BEGIN}
${package_item}${END}

[Binaries.${module_arch}]${BEGIN}
${binary_item}${END}

[PatchPcd.${module_arch}]${BEGIN}
${patchablepcd_item}
${END}

[Protocols.${module_arch}]${BEGIN}
${protocol_item}
${END}

[Ppis.${module_arch}]${BEGIN}
${ppi_item}
${END}

[Guids.${module_arch}]${BEGIN}
${guid_item}
${END}

[PcdEx.${module_arch}]${BEGIN}
${pcd_item}
${END}

[LibraryClasses.${module_arch}]
## @LIB_INSTANCES${BEGIN}
# ${libraryclasses_item}${END}

${depexsection_item}

${userextension_tianocore_item}

${tail_comments}

[BuildOptions.${module_arch}]
## @AsBuilt${BEGIN}
## ${flags_item}${END}
""")
175 | \r | |
## Base class for AutoGen
#
# This class just implements the cache mechanism of AutoGen objects.
#
class AutoGen(object):
    # database to maintain the objects in each child class
    # key: (BuildTarget, ToolChain, Arch, platform meta-file) -> AutoGen object
    __ObjectCache = {}

    ## Factory method
    #
    #   @param  cls             class object of real AutoGen class
    #                           (WorkspaceAutoGen, ModuleAutoGen or PlatformAutoGen)
    #   @param  Workspace       Workspace directory or WorkspaceAutoGen object
    #   @param  MetaFile        The path of meta file
    #   @param  Target          Build target
    #   @param  Toolchain       Tool chain name
    #   @param  Arch            Target arch
    #   @param  *args           The specific class related parameters
    #   @param  **kwargs        The specific class related dict parameters
    #
    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # check if the object has been created; each (target, toolchain, arch,
        # meta-file) tuple maps to exactly one shared instance
        Key = (Target, Toolchain, Arch, MetaFile)
        if Key in cls.__ObjectCache:
            # if it exists, just return it directly
            return cls.__ObjectCache[Key]
        # it didn't exist: create it, cache it, then return it
        RetVal = cls.__ObjectCache[Key] = super(AutoGen, cls).__new__(cls)
        return RetVal

    def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # BUGFIX: object.__init__() takes no extra arguments.  The previous
        # code forwarded 'self' and all factory arguments, which only avoided
        # a TypeError because __new__ is overridden (CPython then ignores the
        # excess arguments).  Call it with no arguments instead.
        super(AutoGen, self).__init__()

    ## hash() operator
    #
    #  The file path of platform file will be used to represent hash value of this object
    #
    #   @retval int     Hash value of the file path of platform file
    #
    def __hash__(self):
        return hash(self.MetaFile)

    ## str() operator
    #
    #  The file path of platform file will be used to represent this object
    #
    #   @retval string  String of platform file path
    #
    def __str__(self):
        return str(self.MetaFile)

    ## "==" operator
    #
    #  NOTE(review): compares self.MetaFile against Other directly (not
    #  Other.MetaFile), so an instance compares equal to a bare meta-file
    #  path; returns a falsy non-bool when Other is falsy.  Preserved as-is
    #  because callers rely on this loose comparison.
    def __eq__(self, Other):
        return Other and self.MetaFile == Other
230 | \r | |
231 | ## Workspace AutoGen class\r | |
232 | #\r | |
233 | # This class is used mainly to control the whole platform build for different\r | |
234 | # architecture. This class will generate top level makefile.\r | |
235 | #\r | |
236 | class WorkspaceAutoGen(AutoGen):\r | |
237 | # call super().__init__ then call the worker function with different parameter count\r | |
238 | def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r | |
239 | if not hasattr(self, "_Init"):\r | |
240 | super(WorkspaceAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r | |
241 | self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r | |
242 | self._Init = True\r | |
243 | \r | |
244 | ## Initialize WorkspaceAutoGen\r | |
245 | #\r | |
246 | # @param WorkspaceDir Root directory of workspace\r | |
247 | # @param ActivePlatform Meta-file of active platform\r | |
248 | # @param Target Build target\r | |
249 | # @param Toolchain Tool chain name\r | |
250 | # @param ArchList List of architecture of current build\r | |
251 | # @param MetaFileDb Database containing meta-files\r | |
252 | # @param BuildConfig Configuration of build\r | |
253 | # @param ToolDefinition Tool chain definitions\r | |
254 | # @param FlashDefinitionFile File of flash definition\r | |
255 | # @param Fds FD list to be generated\r | |
256 | # @param Fvs FV list to be generated\r | |
257 | # @param Caps Capsule list to be generated\r | |
258 | # @param SkuId SKU id from command line\r | |
259 | #\r | |
260 | def _InitWorker(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,\r | |
261 | BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None, Fvs=None, Caps=None, SkuId='', UniFlag=None,\r | |
262 | Progress=None, BuildModule=None):\r | |
263 | self.BuildDatabase = MetaFileDb\r | |
264 | self.MetaFile = ActivePlatform\r | |
265 | self.WorkspaceDir = WorkspaceDir\r | |
266 | self.Platform = self.BuildDatabase[self.MetaFile, TAB_ARCH_COMMON, Target, Toolchain]\r | |
267 | GlobalData.gActivePlatform = self.Platform\r | |
268 | self.BuildTarget = Target\r | |
269 | self.ToolChain = Toolchain\r | |
270 | self.ArchList = ArchList\r | |
271 | self.SkuId = SkuId\r | |
272 | self.UniFlag = UniFlag\r | |
273 | \r | |
274 | self.TargetTxt = BuildConfig\r | |
275 | self.ToolDef = ToolDefinition\r | |
276 | self.FdfFile = FlashDefinitionFile\r | |
277 | self.FdTargetList = Fds if Fds else []\r | |
278 | self.FvTargetList = Fvs if Fvs else []\r | |
279 | self.CapTargetList = Caps if Caps else []\r | |
280 | self.AutoGenObjectList = []\r | |
281 | self._BuildDir = None\r | |
282 | self._FvDir = None\r | |
283 | self._MakeFileDir = None\r | |
284 | self._BuildCommand = None\r | |
285 | self._GuidDict = {}\r | |
286 | \r | |
287 | # there's many relative directory operations, so ...\r | |
288 | os.chdir(self.WorkspaceDir)\r | |
289 | \r | |
290 | #\r | |
291 | # Merge Arch\r | |
292 | #\r | |
293 | if not self.ArchList:\r | |
294 | ArchList = set(self.Platform.SupArchList)\r | |
295 | else:\r | |
296 | ArchList = set(self.ArchList) & set(self.Platform.SupArchList)\r | |
297 | if not ArchList:\r | |
298 | EdkLogger.error("build", PARAMETER_INVALID,\r | |
299 | ExtraData = "Invalid ARCH specified. [Valid ARCH: %s]" % (" ".join(self.Platform.SupArchList)))\r | |
300 | elif self.ArchList and len(ArchList) != len(self.ArchList):\r | |
301 | SkippedArchList = set(self.ArchList).symmetric_difference(set(self.Platform.SupArchList))\r | |
302 | EdkLogger.verbose("\nArch [%s] is ignored because the platform supports [%s] only!"\r | |
303 | % (" ".join(SkippedArchList), " ".join(self.Platform.SupArchList)))\r | |
304 | self.ArchList = tuple(ArchList)\r | |
305 | \r | |
306 | # Validate build target\r | |
307 | if self.BuildTarget not in self.Platform.BuildTargets:\r | |
308 | EdkLogger.error("build", PARAMETER_INVALID,\r | |
309 | ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"\r | |
310 | % (self.BuildTarget, " ".join(self.Platform.BuildTargets)))\r | |
311 | \r | |
312 | \r | |
313 | # parse FDF file to get PCDs in it, if any\r | |
314 | if not self.FdfFile:\r | |
315 | self.FdfFile = self.Platform.FlashDefinition\r | |
316 | \r | |
317 | EdkLogger.info("")\r | |
318 | if self.ArchList:\r | |
319 | EdkLogger.info('%-16s = %s' % ("Architecture(s)", ' '.join(self.ArchList)))\r | |
320 | EdkLogger.info('%-16s = %s' % ("Build target", self.BuildTarget))\r | |
321 | EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))\r | |
322 | \r | |
323 | EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.Platform))\r | |
324 | if BuildModule:\r | |
325 | EdkLogger.info('%-24s = %s' % ("Active Module", BuildModule))\r | |
326 | \r | |
327 | if self.FdfFile:\r | |
328 | EdkLogger.info('%-24s = %s' % ("Flash Image Definition", self.FdfFile))\r | |
329 | \r | |
330 | EdkLogger.verbose("\nFLASH_DEFINITION = %s" % self.FdfFile)\r | |
331 | \r | |
332 | if Progress:\r | |
333 | Progress.Start("\nProcessing meta-data")\r | |
334 | \r | |
335 | if self.FdfFile:\r | |
336 | #\r | |
337 | # Mark now build in AutoGen Phase\r | |
338 | #\r | |
339 | GlobalData.gAutoGenPhase = True\r | |
340 | Fdf = FdfParser(self.FdfFile.Path)\r | |
341 | Fdf.ParseFile()\r | |
342 | GlobalData.gFdfParser = Fdf\r | |
343 | GlobalData.gAutoGenPhase = False\r | |
344 | PcdSet = Fdf.Profile.PcdDict\r | |
345 | if Fdf.CurrentFdName and Fdf.CurrentFdName in Fdf.Profile.FdDict:\r | |
346 | FdDict = Fdf.Profile.FdDict[Fdf.CurrentFdName]\r | |
347 | for FdRegion in FdDict.RegionList:\r | |
348 | if str(FdRegion.RegionType) is 'FILE' and self.Platform.VpdToolGuid in str(FdRegion.RegionDataList):\r | |
349 | if int(FdRegion.Offset) % 8 != 0:\r | |
350 | EdkLogger.error("build", FORMAT_INVALID, 'The VPD Base Address %s must be 8-byte aligned.' % (FdRegion.Offset))\r | |
351 | ModuleList = Fdf.Profile.InfList\r | |
352 | self.FdfProfile = Fdf.Profile\r | |
353 | for fvname in self.FvTargetList:\r | |
354 | if fvname.upper() not in self.FdfProfile.FvDict:\r | |
355 | EdkLogger.error("build", OPTION_VALUE_INVALID,\r | |
356 | "No such an FV in FDF file: %s" % fvname)\r | |
357 | \r | |
358 | # In DSC file may use FILE_GUID to override the module, then in the Platform.Modules use FILE_GUIDmodule.inf as key,\r | |
359 | # but the path (self.MetaFile.Path) is the real path\r | |
360 | for key in self.FdfProfile.InfDict:\r | |
361 | if key == 'ArchTBD':\r | |
362 | MetaFile_cache = defaultdict(set)\r | |
363 | for Arch in self.ArchList:\r | |
364 | Current_Platform_cache = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]\r | |
365 | for Pkey in Current_Platform_cache.Modules:\r | |
366 | MetaFile_cache[Arch].add(Current_Platform_cache.Modules[Pkey].MetaFile)\r | |
367 | for Inf in self.FdfProfile.InfDict[key]:\r | |
368 | ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)\r | |
369 | for Arch in self.ArchList:\r | |
370 | if ModuleFile in MetaFile_cache[Arch]:\r | |
371 | break\r | |
372 | else:\r | |
373 | ModuleData = self.BuildDatabase[ModuleFile, Arch, Target, Toolchain]\r | |
374 | if not ModuleData.IsBinaryModule:\r | |
375 | EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)\r | |
376 | \r | |
377 | else:\r | |
378 | for Arch in self.ArchList:\r | |
379 | if Arch == key:\r | |
380 | Platform = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]\r | |
381 | MetaFileList = set()\r | |
382 | for Pkey in Platform.Modules:\r | |
383 | MetaFileList.add(Platform.Modules[Pkey].MetaFile)\r | |
384 | for Inf in self.FdfProfile.InfDict[key]:\r | |
385 | ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)\r | |
386 | if ModuleFile in MetaFileList:\r | |
387 | continue\r | |
388 | ModuleData = self.BuildDatabase[ModuleFile, Arch, Target, Toolchain]\r | |
389 | if not ModuleData.IsBinaryModule:\r | |
390 | EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)\r | |
391 | \r | |
392 | else:\r | |
393 | PcdSet = {}\r | |
394 | ModuleList = []\r | |
395 | self.FdfProfile = None\r | |
396 | if self.FdTargetList:\r | |
397 | EdkLogger.info("No flash definition file found. FD [%s] will be ignored." % " ".join(self.FdTargetList))\r | |
398 | self.FdTargetList = []\r | |
399 | if self.FvTargetList:\r | |
400 | EdkLogger.info("No flash definition file found. FV [%s] will be ignored." % " ".join(self.FvTargetList))\r | |
401 | self.FvTargetList = []\r | |
402 | if self.CapTargetList:\r | |
403 | EdkLogger.info("No flash definition file found. Capsule [%s] will be ignored." % " ".join(self.CapTargetList))\r | |
404 | self.CapTargetList = []\r | |
405 | \r | |
406 | # apply SKU and inject PCDs from Flash Definition file\r | |
407 | for Arch in self.ArchList:\r | |
408 | Platform = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]\r | |
409 | PlatformPcds = Platform.Pcds\r | |
410 | self._GuidDict = Platform._GuidDict\r | |
411 | SourcePcdDict = {TAB_PCDS_DYNAMIC_EX:set(), TAB_PCDS_PATCHABLE_IN_MODULE:set(),TAB_PCDS_DYNAMIC:set(),TAB_PCDS_FIXED_AT_BUILD:set()}\r | |
412 | BinaryPcdDict = {TAB_PCDS_DYNAMIC_EX:set(), TAB_PCDS_PATCHABLE_IN_MODULE:set()}\r | |
413 | SourcePcdDict_Keys = SourcePcdDict.keys()\r | |
414 | BinaryPcdDict_Keys = BinaryPcdDict.keys()\r | |
415 | \r | |
416 | # generate the SourcePcdDict and BinaryPcdDict\r | |
417 | PGen = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)\r | |
418 | for BuildData in PGen.BuildDatabase._CACHE_.values():\r | |
419 | if BuildData.Arch != Arch:\r | |
420 | continue\r | |
421 | if BuildData.MetaFile.Ext == '.inf':\r | |
422 | for key in BuildData.Pcds:\r | |
423 | if BuildData.Pcds[key].Pending:\r | |
424 | if key in Platform.Pcds:\r | |
425 | PcdInPlatform = Platform.Pcds[key]\r | |
426 | if PcdInPlatform.Type:\r | |
427 | BuildData.Pcds[key].Type = PcdInPlatform.Type\r | |
428 | BuildData.Pcds[key].Pending = False\r | |
429 | \r | |
430 | if BuildData.MetaFile in Platform.Modules:\r | |
431 | PlatformModule = Platform.Modules[str(BuildData.MetaFile)]\r | |
432 | if key in PlatformModule.Pcds:\r | |
433 | PcdInPlatform = PlatformModule.Pcds[key]\r | |
434 | if PcdInPlatform.Type:\r | |
435 | BuildData.Pcds[key].Type = PcdInPlatform.Type\r | |
436 | BuildData.Pcds[key].Pending = False\r | |
437 | else:\r | |
438 | #Pcd used in Library, Pcd Type from reference module if Pcd Type is Pending\r | |
439 | if BuildData.Pcds[key].Pending:\r | |
440 | MGen = ModuleAutoGen(self, BuildData.MetaFile, Target, Toolchain, Arch, self.MetaFile)\r | |
441 | if MGen and MGen.IsLibrary:\r | |
442 | if MGen in PGen.LibraryAutoGenList:\r | |
443 | ReferenceModules = MGen.ReferenceModules\r | |
444 | for ReferenceModule in ReferenceModules:\r | |
445 | if ReferenceModule.MetaFile in Platform.Modules:\r | |
446 | RefPlatformModule = Platform.Modules[str(ReferenceModule.MetaFile)]\r | |
447 | if key in RefPlatformModule.Pcds:\r | |
448 | PcdInReferenceModule = RefPlatformModule.Pcds[key]\r | |
449 | if PcdInReferenceModule.Type:\r | |
450 | BuildData.Pcds[key].Type = PcdInReferenceModule.Type\r | |
451 | BuildData.Pcds[key].Pending = False\r | |
452 | break\r | |
453 | \r | |
454 | if TAB_PCDS_DYNAMIC_EX in BuildData.Pcds[key].Type:\r | |
455 | if BuildData.IsBinaryModule:\r | |
456 | BinaryPcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r | |
457 | else:\r | |
458 | SourcePcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r | |
459 | \r | |
460 | elif TAB_PCDS_PATCHABLE_IN_MODULE in BuildData.Pcds[key].Type:\r | |
461 | if BuildData.MetaFile.Ext == '.inf':\r | |
462 | if BuildData.IsBinaryModule:\r | |
463 | BinaryPcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r | |
464 | else:\r | |
465 | SourcePcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r | |
466 | \r | |
467 | elif TAB_PCDS_DYNAMIC in BuildData.Pcds[key].Type:\r | |
468 | SourcePcdDict[TAB_PCDS_DYNAMIC].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r | |
469 | elif TAB_PCDS_FIXED_AT_BUILD in BuildData.Pcds[key].Type:\r | |
470 | SourcePcdDict[TAB_PCDS_FIXED_AT_BUILD].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))\r | |
471 | else:\r | |
472 | pass\r | |
473 | #\r | |
474 | # A PCD can only use one type for all source modules\r | |
475 | #\r | |
476 | for i in SourcePcdDict_Keys:\r | |
477 | for j in SourcePcdDict_Keys:\r | |
478 | if i != j:\r | |
479 | Intersections = SourcePcdDict[i].intersection(SourcePcdDict[j])\r | |
480 | if len(Intersections) > 0:\r | |
481 | EdkLogger.error(\r | |
482 | 'build',\r | |
483 | FORMAT_INVALID,\r | |
484 | "Building modules from source INFs, following PCD use %s and %s access method. It must be corrected to use only one access method." % (i, j),\r | |
485 | ExtraData='\n\t'.join(str(P[1]+'.'+P[0]) for P in Intersections)\r | |
486 | )\r | |
487 | \r | |
488 | #\r | |
489 | # intersection the BinaryPCD for Mixed PCD\r | |
490 | #\r | |
491 | for i in BinaryPcdDict_Keys:\r | |
492 | for j in BinaryPcdDict_Keys:\r | |
493 | if i != j:\r | |
494 | Intersections = BinaryPcdDict[i].intersection(BinaryPcdDict[j])\r | |
495 | for item in Intersections:\r | |
496 | NewPcd1 = (item[0] + '_' + i, item[1])\r | |
497 | NewPcd2 = (item[0] + '_' + j, item[1])\r | |
498 | if item not in GlobalData.MixedPcd:\r | |
499 | GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]\r | |
500 | else:\r | |
501 | if NewPcd1 not in GlobalData.MixedPcd[item]:\r | |
502 | GlobalData.MixedPcd[item].append(NewPcd1)\r | |
503 | if NewPcd2 not in GlobalData.MixedPcd[item]:\r | |
504 | GlobalData.MixedPcd[item].append(NewPcd2)\r | |
505 | \r | |
506 | #\r | |
507 | # intersection the SourcePCD and BinaryPCD for Mixed PCD\r | |
508 | #\r | |
509 | for i in SourcePcdDict_Keys:\r | |
510 | for j in BinaryPcdDict_Keys:\r | |
511 | if i != j:\r | |
512 | Intersections = SourcePcdDict[i].intersection(BinaryPcdDict[j])\r | |
513 | for item in Intersections:\r | |
514 | NewPcd1 = (item[0] + '_' + i, item[1])\r | |
515 | NewPcd2 = (item[0] + '_' + j, item[1])\r | |
516 | if item not in GlobalData.MixedPcd:\r | |
517 | GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]\r | |
518 | else:\r | |
519 | if NewPcd1 not in GlobalData.MixedPcd[item]:\r | |
520 | GlobalData.MixedPcd[item].append(NewPcd1)\r | |
521 | if NewPcd2 not in GlobalData.MixedPcd[item]:\r | |
522 | GlobalData.MixedPcd[item].append(NewPcd2)\r | |
523 | \r | |
524 | for BuildData in PGen.BuildDatabase._CACHE_.values():\r | |
525 | if BuildData.Arch != Arch:\r | |
526 | continue\r | |
527 | for key in BuildData.Pcds:\r | |
528 | for SinglePcd in GlobalData.MixedPcd:\r | |
529 | if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) == SinglePcd:\r | |
530 | for item in GlobalData.MixedPcd[SinglePcd]:\r | |
531 | Pcd_Type = item[0].split('_')[-1]\r | |
532 | if (Pcd_Type == BuildData.Pcds[key].Type) or (Pcd_Type == TAB_PCDS_DYNAMIC_EX and BuildData.Pcds[key].Type in PCD_DYNAMIC_EX_TYPE_SET) or \\r | |
533 | (Pcd_Type == TAB_PCDS_DYNAMIC and BuildData.Pcds[key].Type in PCD_DYNAMIC_TYPE_SET):\r | |
534 | Value = BuildData.Pcds[key]\r | |
535 | Value.TokenCName = BuildData.Pcds[key].TokenCName + '_' + Pcd_Type\r | |
536 | if len(key) == 2:\r | |
537 | newkey = (Value.TokenCName, key[1])\r | |
538 | elif len(key) == 3:\r | |
539 | newkey = (Value.TokenCName, key[1], key[2])\r | |
540 | del BuildData.Pcds[key]\r | |
541 | BuildData.Pcds[newkey] = Value\r | |
542 | break\r | |
543 | break\r | |
544 | \r | |
545 | # handle the mixed pcd in FDF file\r | |
546 | for key in PcdSet:\r | |
547 | if key in GlobalData.MixedPcd:\r | |
548 | Value = PcdSet[key]\r | |
549 | del PcdSet[key]\r | |
550 | for item in GlobalData.MixedPcd[key]:\r | |
551 | PcdSet[item] = Value\r | |
552 | \r | |
553 | #Collect package set information from INF of FDF\r | |
554 | PkgSet = set()\r | |
555 | for Inf in ModuleList:\r | |
556 | ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)\r | |
557 | if ModuleFile in Platform.Modules:\r | |
558 | continue\r | |
559 | ModuleData = self.BuildDatabase[ModuleFile, Arch, Target, Toolchain]\r | |
560 | PkgSet.update(ModuleData.Packages)\r | |
561 | Pkgs = list(PkgSet) + list(PGen.PackageList)\r | |
562 | DecPcds = set()\r | |
563 | DecPcdsKey = set()\r | |
564 | for Pkg in Pkgs:\r | |
565 | for Pcd in Pkg.Pcds:\r | |
566 | DecPcds.add((Pcd[0], Pcd[1]))\r | |
567 | DecPcdsKey.add((Pcd[0], Pcd[1], Pcd[2]))\r | |
568 | \r | |
569 | Platform.SkuName = self.SkuId\r | |
570 | for Name, Guid,Fileds in PcdSet:\r | |
571 | if (Name, Guid) not in DecPcds:\r | |
572 | EdkLogger.error(\r | |
573 | 'build',\r | |
574 | PARSER_ERROR,\r | |
575 | "PCD (%s.%s) used in FDF is not declared in DEC files." % (Guid, Name),\r | |
576 | File = self.FdfProfile.PcdFileLineDict[Name, Guid][0],\r | |
577 | Line = self.FdfProfile.PcdFileLineDict[Name, Guid][1]\r | |
578 | )\r | |
579 | else:\r | |
580 | # Check whether Dynamic or DynamicEx PCD used in FDF file. If used, build break and give a error message.\r | |
581 | if (Name, Guid, TAB_PCDS_FIXED_AT_BUILD) in DecPcdsKey \\r | |
582 | or (Name, Guid, TAB_PCDS_PATCHABLE_IN_MODULE) in DecPcdsKey \\r | |
583 | or (Name, Guid, TAB_PCDS_FEATURE_FLAG) in DecPcdsKey:\r | |
584 | continue\r | |
585 | elif (Name, Guid, TAB_PCDS_DYNAMIC) in DecPcdsKey or (Name, Guid, TAB_PCDS_DYNAMIC_EX) in DecPcdsKey:\r | |
586 | EdkLogger.error(\r | |
587 | 'build',\r | |
588 | PARSER_ERROR,\r | |
589 | "Using Dynamic or DynamicEx type of PCD [%s.%s] in FDF file is not allowed." % (Guid, Name),\r | |
590 | File = self.FdfProfile.PcdFileLineDict[Name, Guid][0],\r | |
591 | Line = self.FdfProfile.PcdFileLineDict[Name, Guid][1]\r | |
592 | )\r | |
593 | \r | |
594 | Pa = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)\r | |
595 | #\r | |
596 | # Explicitly collect platform's dynamic PCDs\r | |
597 | #\r | |
598 | Pa.CollectPlatformDynamicPcds()\r | |
599 | Pa.CollectFixedAtBuildPcds()\r | |
600 | self.AutoGenObjectList.append(Pa)\r | |
601 | \r | |
602 | #\r | |
603 | # Generate Package level hash value\r | |
604 | #\r | |
605 | GlobalData.gPackageHash[Arch] = {}\r | |
606 | if GlobalData.gUseHashCache:\r | |
607 | for Pkg in Pkgs:\r | |
608 | self._GenPkgLevelHash(Pkg)\r | |
609 | \r | |
610 | #\r | |
611 | # Check PCDs token value conflict in each DEC file.\r | |
612 | #\r | |
613 | self._CheckAllPcdsTokenValueConflict()\r | |
614 | \r | |
615 | #\r | |
616 | # Check PCD type and definition between DSC and DEC\r | |
617 | #\r | |
618 | self._CheckPcdDefineAndType()\r | |
619 | \r | |
620 | #\r | |
621 | # Create BuildOptions Macro & PCD metafile, also add the Active Platform and FDF file.\r | |
622 | #\r | |
623 | content = 'gCommandLineDefines: '\r | |
624 | content += str(GlobalData.gCommandLineDefines)\r | |
625 | content += os.linesep\r | |
626 | content += 'BuildOptionPcd: '\r | |
627 | content += str(GlobalData.BuildOptionPcd)\r | |
628 | content += os.linesep\r | |
629 | content += 'Active Platform: '\r | |
630 | content += str(self.Platform)\r | |
631 | content += os.linesep\r | |
632 | if self.FdfFile:\r | |
633 | content += 'Flash Image Definition: '\r | |
634 | content += str(self.FdfFile)\r | |
635 | content += os.linesep\r | |
636 | SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)\r | |
637 | \r | |
638 | #\r | |
639 | # Create PcdToken Number file for Dynamic/DynamicEx Pcd.\r | |
640 | #\r | |
641 | PcdTokenNumber = 'PcdTokenNumber: '\r | |
642 | if Pa.PcdTokenNumber:\r | |
643 | if Pa.DynamicPcdList:\r | |
644 | for Pcd in Pa.DynamicPcdList:\r | |
645 | PcdTokenNumber += os.linesep\r | |
646 | PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))\r | |
647 | PcdTokenNumber += ' : '\r | |
648 | PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName])\r | |
649 | SaveFileOnChange(os.path.join(self.BuildDir, 'PcdTokenNumber'), PcdTokenNumber, False)\r | |
650 | \r | |
651 | #\r | |
652 | # Get set of workspace metafiles\r | |
653 | #\r | |
654 | AllWorkSpaceMetaFiles = self._GetMetaFiles(Target, Toolchain, Arch)\r | |
655 | \r | |
656 | #\r | |
657 | # Retrieve latest modified time of all metafiles\r | |
658 | #\r | |
659 | SrcTimeStamp = 0\r | |
660 | for f in AllWorkSpaceMetaFiles:\r | |
661 | if os.stat(f)[8] > SrcTimeStamp:\r | |
662 | SrcTimeStamp = os.stat(f)[8]\r | |
663 | self._SrcTimeStamp = SrcTimeStamp\r | |
664 | \r | |
665 | if GlobalData.gUseHashCache:\r | |
666 | m = hashlib.md5()\r | |
667 | for files in AllWorkSpaceMetaFiles:\r | |
668 | if files.endswith('.dec'):\r | |
669 | continue\r | |
670 | f = open(files, 'r')\r | |
671 | Content = f.read()\r | |
672 | f.close()\r | |
673 | m.update(Content)\r | |
674 | SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), True)\r | |
675 | GlobalData.gPlatformHash = m.hexdigest()\r | |
676 | \r | |
677 | #\r | |
678 | # Write metafile list to build directory\r | |
679 | #\r | |
680 | AutoGenFilePath = os.path.join(self.BuildDir, 'AutoGen')\r | |
681 | if os.path.exists (AutoGenFilePath):\r | |
682 | os.remove(AutoGenFilePath)\r | |
683 | if not os.path.exists(self.BuildDir):\r | |
684 | os.makedirs(self.BuildDir)\r | |
685 | with open(os.path.join(self.BuildDir, 'AutoGen'), 'w+') as file:\r | |
686 | for f in AllWorkSpaceMetaFiles:\r | |
687 | print(f, file=file)\r | |
688 | return True\r | |
689 | \r | |
690 | def _GenPkgLevelHash(self, Pkg):\r | |
691 | if Pkg.PackageName in GlobalData.gPackageHash[Pkg.Arch]:\r | |
692 | return\r | |
693 | \r | |
694 | PkgDir = os.path.join(self.BuildDir, Pkg.Arch, Pkg.PackageName)\r | |
695 | CreateDirectory(PkgDir)\r | |
696 | HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')\r | |
697 | m = hashlib.md5()\r | |
698 | # Get .dec file's hash value\r | |
699 | f = open(Pkg.MetaFile.Path, 'r')\r | |
700 | Content = f.read()\r | |
701 | f.close()\r | |
702 | m.update(Content)\r | |
703 | # Get include files hash value\r | |
704 | if Pkg.Includes:\r | |
705 | for inc in sorted(Pkg.Includes, key=lambda x: str(x)):\r | |
706 | for Root, Dirs, Files in os.walk(str(inc)):\r | |
707 | for File in sorted(Files):\r | |
708 | File_Path = os.path.join(Root, File)\r | |
709 | f = open(File_Path, 'r')\r | |
710 | Content = f.read()\r | |
711 | f.close()\r | |
712 | m.update(Content)\r | |
713 | SaveFileOnChange(HashFile, m.hexdigest(), True)\r | |
714 | GlobalData.gPackageHash[Pkg.Arch][Pkg.PackageName] = m.hexdigest()\r | |
715 | \r | |
716 | def _GetMetaFiles(self, Target, Toolchain, Arch):\r | |
717 | AllWorkSpaceMetaFiles = set()\r | |
718 | #\r | |
719 | # add fdf\r | |
720 | #\r | |
721 | if self.FdfFile:\r | |
722 | AllWorkSpaceMetaFiles.add (self.FdfFile.Path)\r | |
723 | for f in GlobalData.gFdfParser.GetAllIncludedFile():\r | |
724 | AllWorkSpaceMetaFiles.add (f.FileName)\r | |
725 | #\r | |
726 | # add dsc\r | |
727 | #\r | |
728 | AllWorkSpaceMetaFiles.add(self.MetaFile.Path)\r | |
729 | \r | |
730 | #\r | |
731 | # add build_rule.txt & tools_def.txt\r | |
732 | #\r | |
733 | AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultBuildRuleFile))\r | |
734 | AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultToolsDefFile))\r | |
735 | \r | |
736 | # add BuildOption metafile\r | |
737 | #\r | |
738 | AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'BuildOptions'))\r | |
739 | \r | |
740 | # add PcdToken Number file for Dynamic/DynamicEx Pcd\r | |
741 | #\r | |
742 | AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'PcdTokenNumber'))\r | |
743 | \r | |
744 | for Arch in self.ArchList:\r | |
745 | #\r | |
746 | # add dec\r | |
747 | #\r | |
748 | for Package in PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch).PackageList:\r | |
749 | AllWorkSpaceMetaFiles.add(Package.MetaFile.Path)\r | |
750 | \r | |
751 | #\r | |
752 | # add included dsc\r | |
753 | #\r | |
754 | for filePath in self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]._RawData.IncludedFiles:\r | |
755 | AllWorkSpaceMetaFiles.add(filePath.Path)\r | |
756 | \r | |
757 | return AllWorkSpaceMetaFiles\r | |
758 | \r | |
    def _CheckPcdDefineAndType(self):
        """Validate each DSC PCD's access type against its DEC declaration.

        For every PCD the DSC declares, search the platform's packages: if some
        package declares it with the same type the PCD is accepted; if a
        package only declares it under a different type, report a fatal build
        error.  PCDs that no package declares at all are collected per arch
        and reported as warnings afterwards.
        """
        PcdTypeSet = {TAB_PCDS_FIXED_AT_BUILD,
                      TAB_PCDS_PATCHABLE_IN_MODULE,
                      TAB_PCDS_FEATURE_FLAG,
                      TAB_PCDS_DYNAMIC,
                      TAB_PCDS_DYNAMIC_EX}

        # This dict store PCDs which are not used by any modules with specified arches
        UnusedPcd = OrderedDict()
        for Pa in self.AutoGenObjectList:
            # Key of DSC's Pcds dictionary is PcdCName, TokenSpaceGuid
            for Pcd in Pa.Platform.Pcds:
                PcdType = Pa.Platform.Pcds[Pcd].Type

                # If no PCD type, this PCD comes from FDF
                if not PcdType:
                    continue

                # Try to remove Hii and Vpd suffix
                # (e.g. DynamicExHii -> DynamicEx) so the type matches the
                # plain DEC declaration keys below.
                if PcdType.startswith(TAB_PCDS_DYNAMIC_EX):
                    PcdType = TAB_PCDS_DYNAMIC_EX
                elif PcdType.startswith(TAB_PCDS_DYNAMIC):
                    PcdType = TAB_PCDS_DYNAMIC

                for Package in Pa.PackageList:
                    # Key of DEC's Pcds dictionary is PcdCName, TokenSpaceGuid, PcdType
                    if (Pcd[0], Pcd[1], PcdType) in Package.Pcds:
                        # Found with a matching type: this PCD is valid; stop searching.
                        break
                    for Type in PcdTypeSet:
                        if (Pcd[0], Pcd[1], Type) in Package.Pcds:
                            # Declared, but only under a different type: fatal mismatch.
                            EdkLogger.error(
                                'build',
                                FORMAT_INVALID,
                                "Type [%s] of PCD [%s.%s] in DSC file doesn't match the type [%s] defined in DEC file." \
                                % (Pa.Platform.Pcds[Pcd].Type, Pcd[1], Pcd[0], Type),
                                ExtraData=None
                            )
                            return
                else:
                    # for-else: no package declared this PCD at all; record it
                    # as unused for this arch.
                    UnusedPcd.setdefault(Pcd, []).append(Pa.Arch)

        for Pcd in UnusedPcd:
            EdkLogger.warn(
                'build',
                "The PCD was not specified by any INF module in the platform for the given architecture.\n"
                "\tPCD: [%s.%s]\n\tPlatform: [%s]\n\tArch: %s"
                % (Pcd[1], Pcd[0], os.path.basename(str(self.MetaFile)), str(UnusedPcd[Pcd])),
                ExtraData=None
            )
808 | \r | |
809 | def __repr__(self):\r | |
810 | return "%s [%s]" % (self.MetaFile, ", ".join(self.ArchList))\r | |
811 | \r | |
812 | ## Return the directory to store FV files\r | |
813 | def _GetFvDir(self):\r | |
814 | if self._FvDir is None:\r | |
815 | self._FvDir = path.join(self.BuildDir, TAB_FV_DIRECTORY)\r | |
816 | return self._FvDir\r | |
817 | \r | |
818 | ## Return the directory to store all intermediate and final files built\r | |
819 | def _GetBuildDir(self):\r | |
820 | if self._BuildDir is None:\r | |
821 | return self.AutoGenObjectList[0].BuildDir\r | |
822 | \r | |
823 | ## Return the build output directory platform specifies\r | |
824 | def _GetOutputDir(self):\r | |
825 | return self.Platform.OutputDirectory\r | |
826 | \r | |
827 | ## Return platform name\r | |
828 | def _GetName(self):\r | |
829 | return self.Platform.PlatformName\r | |
830 | \r | |
831 | ## Return meta-file GUID\r | |
832 | def _GetGuid(self):\r | |
833 | return self.Platform.Guid\r | |
834 | \r | |
835 | ## Return platform version\r | |
836 | def _GetVersion(self):\r | |
837 | return self.Platform.Version\r | |
838 | \r | |
839 | ## Return paths of tools\r | |
840 | def _GetToolDefinition(self):\r | |
841 | return self.AutoGenObjectList[0].ToolDefinition\r | |
842 | \r | |
843 | ## Return directory of platform makefile\r | |
844 | #\r | |
845 | # @retval string Makefile directory\r | |
846 | #\r | |
847 | def _GetMakeFileDir(self):\r | |
848 | if self._MakeFileDir is None:\r | |
849 | self._MakeFileDir = self.BuildDir\r | |
850 | return self._MakeFileDir\r | |
851 | \r | |
852 | ## Return build command string\r | |
853 | #\r | |
854 | # @retval string Build command string\r | |
855 | #\r | |
856 | def _GetBuildCommand(self):\r | |
857 | if self._BuildCommand is None:\r | |
858 | # BuildCommand should be all the same. So just get one from platform AutoGen\r | |
859 | self._BuildCommand = self.AutoGenObjectList[0].BuildCommand\r | |
860 | return self._BuildCommand\r | |
861 | \r | |
862 | ## Check the PCDs token value conflict in each DEC file.\r | |
863 | #\r | |
864 | # Will cause build break and raise error message while two PCDs conflict.\r | |
865 | #\r | |
866 | # @return None\r | |
867 | #\r | |
868 | def _CheckAllPcdsTokenValueConflict(self):\r | |
869 | for Pa in self.AutoGenObjectList:\r | |
870 | for Package in Pa.PackageList:\r | |
871 | PcdList = Package.Pcds.values()\r | |
872 | PcdList.sort(key=lambda x: int(x.TokenValue, 0))\r | |
873 | Count = 0\r | |
874 | while (Count < len(PcdList) - 1) :\r | |
875 | Item = PcdList[Count]\r | |
876 | ItemNext = PcdList[Count + 1]\r | |
877 | #\r | |
878 | # Make sure in the same token space the TokenValue should be unique\r | |
879 | #\r | |
880 | if (int(Item.TokenValue, 0) == int(ItemNext.TokenValue, 0)):\r | |
881 | SameTokenValuePcdList = []\r | |
882 | SameTokenValuePcdList.append(Item)\r | |
883 | SameTokenValuePcdList.append(ItemNext)\r | |
884 | RemainPcdListLength = len(PcdList) - Count - 2\r | |
885 | for ValueSameCount in range(RemainPcdListLength):\r | |
886 | if int(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount].TokenValue, 0) == int(Item.TokenValue, 0):\r | |
887 | SameTokenValuePcdList.append(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount])\r | |
888 | else:\r | |
889 | break;\r | |
890 | #\r | |
891 | # Sort same token value PCD list with TokenGuid and TokenCName\r | |
892 | #\r | |
893 | SameTokenValuePcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))\r | |
894 | SameTokenValuePcdListCount = 0\r | |
895 | while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):\r | |
896 | Flag = False\r | |
897 | TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]\r | |
898 | TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1]\r | |
899 | \r | |
900 | if (TemListItem.TokenSpaceGuidCName == TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName != TemListItemNext.TokenCName):\r | |
901 | for PcdItem in GlobalData.MixedPcd:\r | |
902 | if (TemListItem.TokenCName, TemListItem.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem] or \\r | |
903 | (TemListItemNext.TokenCName, TemListItemNext.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:\r | |
904 | Flag = True\r | |
905 | if not Flag:\r | |
906 | EdkLogger.error(\r | |
907 | 'build',\r | |
908 | FORMAT_INVALID,\r | |
909 | "The TokenValue [%s] of PCD [%s.%s] is conflict with: [%s.%s] in %s"\\r | |
910 | % (TemListItem.TokenValue, TemListItem.TokenSpaceGuidCName, TemListItem.TokenCName, TemListItemNext.TokenSpaceGuidCName, TemListItemNext.TokenCName, Package),\r | |
911 | ExtraData=None\r | |
912 | )\r | |
913 | SameTokenValuePcdListCount += 1\r | |
914 | Count += SameTokenValuePcdListCount\r | |
915 | Count += 1\r | |
916 | \r | |
917 | PcdList = Package.Pcds.values()\r | |
918 | PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))\r | |
919 | Count = 0\r | |
920 | while (Count < len(PcdList) - 1) :\r | |
921 | Item = PcdList[Count]\r | |
922 | ItemNext = PcdList[Count + 1]\r | |
923 | #\r | |
924 | # Check PCDs with same TokenSpaceGuidCName.TokenCName have same token value as well.\r | |
925 | #\r | |
926 | if (Item.TokenSpaceGuidCName == ItemNext.TokenSpaceGuidCName) and (Item.TokenCName == ItemNext.TokenCName) and (int(Item.TokenValue, 0) != int(ItemNext.TokenValue, 0)):\r | |
927 | EdkLogger.error(\r | |
928 | 'build',\r | |
929 | FORMAT_INVALID,\r | |
930 | "The TokenValue [%s] of PCD [%s.%s] in %s defined in two places should be same as well."\\r | |
931 | % (Item.TokenValue, Item.TokenSpaceGuidCName, Item.TokenCName, Package),\r | |
932 | ExtraData=None\r | |
933 | )\r | |
934 | Count += 1\r | |
935 | ## Generate fds command\r | |
936 | def _GenFdsCommand(self):\r | |
937 | return (GenMake.TopLevelMakefile(self)._TEMPLATE_.Replace(GenMake.TopLevelMakefile(self)._TemplateDict)).strip()\r | |
938 | \r | |
939 | ## Create makefile for the platform and modules in it\r | |
940 | #\r | |
941 | # @param CreateDepsMakeFile Flag indicating if the makefile for\r | |
942 | # modules will be created as well\r | |
943 | #\r | |
944 | def CreateMakeFile(self, CreateDepsMakeFile=False):\r | |
945 | if not CreateDepsMakeFile:\r | |
946 | return\r | |
947 | for Pa in self.AutoGenObjectList:\r | |
948 | Pa.CreateMakeFile(True)\r | |
949 | \r | |
950 | ## Create autogen code for platform and modules\r | |
951 | #\r | |
952 | # Since there's no autogen code for platform, this method will do nothing\r | |
953 | # if CreateModuleCodeFile is set to False.\r | |
954 | #\r | |
955 | # @param CreateDepsCodeFile Flag indicating if creating module's\r | |
956 | # autogen code file or not\r | |
957 | #\r | |
958 | def CreateCodeFile(self, CreateDepsCodeFile=False):\r | |
959 | if not CreateDepsCodeFile:\r | |
960 | return\r | |
961 | for Pa in self.AutoGenObjectList:\r | |
962 | Pa.CreateCodeFile(True)\r | |
963 | \r | |
964 | ## Create AsBuilt INF file the platform\r | |
965 | #\r | |
966 | def CreateAsBuiltInf(self):\r | |
967 | return\r | |
968 | \r | |
    # Read-only properties delegating to the lazy _Get* accessors above.
    Name = property(_GetName)
    Guid = property(_GetGuid)
    Version = property(_GetVersion)
    OutputDir = property(_GetOutputDir)

    ToolDefinition = property(_GetToolDefinition) # toolcode : tool path

    BuildDir = property(_GetBuildDir)
    FvDir = property(_GetFvDir)
    MakeFileDir = property(_GetMakeFileDir)
    BuildCommand = property(_GetBuildCommand)
    GenFdsCommand = property(_GenFdsCommand)
981 | \r | |
982 | ## AutoGen class for platform\r | |
983 | #\r | |
984 | # PlatformAutoGen class will process the original information in platform\r | |
985 | # file in order to generate makefile for platform.\r | |
986 | #\r | |
987 | class PlatformAutoGen(AutoGen):\r | |
    # call super().__init__ then call the worker function with different parameter count
    def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # Guard so a second construction of the same (cached) object does not
        # re-run initialization.
        if not hasattr(self, "_Init"):
            # NOTE(review): 'self' is deliberately forwarded to the base
            # __init__ as an extra positional argument — confirm against the
            # AutoGen base class signature before changing.
            super(PlatformAutoGen, self).__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
            self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
            self._Init = True
    #
    # Used to store all PCDs for both PEI and DXE phase, in order to generate
    # correct PCD database
    #
    # NOTE(review): these are mutable CLASS-level attributes, shared by every
    # PlatformAutoGen instance in the process — confirm that is intended.
    _DynaPcdList_ = []
    _NonDynaPcdList_ = []
    _PlatformPcds = {}

    #
    # The priority list while override build option
    #
    # Each key is a bitmask-like string: one digit per field in
    # TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE, '1' when the field is
    # concrete and '0' when it is a wildcard; a higher value wins.
    PrioList = {"0x11111" : 16,     #  TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
                "0x01111" : 15,     #  ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
                "0x10111" : 14,     #  TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
                "0x00111" : 13,     #  ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
                "0x11011" : 12,     #  TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
                "0x01011" : 11,     #  ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
                "0x10011" : 10,     #  TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
                "0x00011" : 9,      #  ******_*********_****_COMMANDTYPE_ATTRIBUTE
                "0x11101" : 8,      #  TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
                "0x01101" : 7,      #  ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
                "0x10101" : 6,      #  TARGET_*********_ARCH_***********_ATTRIBUTE
                "0x00101" : 5,      #  ******_*********_ARCH_***********_ATTRIBUTE
                "0x11001" : 4,      #  TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
                "0x01001" : 3,      #  ******_TOOLCHAIN_****_***********_ATTRIBUTE
                "0x10001" : 2,      #  TARGET_*********_****_***********_ATTRIBUTE
                "0x00001" : 1}      #  ******_*********_****_***********_ATTRIBUTE (Lowest)
1021 | \r | |
    ## Initialize PlatformAutoGen
    #
    #
    # @param      Workspace      WorkspaceAutoGen object
    # @param      PlatformFile   Platform file (DSC file)
    # @param      Target         Build target (DEBUG, RELEASE)
    # @param      Toolchain      Name of tool chain
    # @param      Arch           arch of the platform supports
    #
    def _InitWorker(self, Workspace, PlatformFile, Target, Toolchain, Arch):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen platform [%s] [%s]" % (PlatformFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (PlatformFile, Arch, Toolchain, Target)

        # Identity of this platform build: DSC file + workspace + build knobs.
        self.MetaFile = PlatformFile
        self.Workspace = Workspace
        self.WorkspaceDir = Workspace.WorkspaceDir
        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.SourceDir = PlatformFile.SubDir
        self.SourceOverrideDir = None
        self.FdTargetList = self.Workspace.FdTargetList
        self.FvTargetList = self.Workspace.FvTargetList
        self.AllPcdList = []
        # get the original module/package/platform objects
        self.BuildDatabase = Workspace.BuildDatabase
        self.DscBuildDataObj = Workspace.Platform
        self._GuidDict = Workspace._GuidDict

        # flag indicating if the makefile/C-code file has been created or not
        self.IsMakeFileCreated = False
        self.IsCodeFileCreated = False

        # Lazily-computed caches; None means "not computed yet".
        self._Platform = None
        self._Name = None
        self._Guid = None
        self._Version = None

        self._BuildRule = None
        self._SourceDir = None
        self._BuildDir = None
        self._OutputDir = None
        self._FvDir = None
        self._MakeFileDir = None
        self._FdfFile = None

        self._PcdTokenNumber = None    # (TokenCName, TokenSpaceGuidCName) : GeneratedTokenNumber
        self._DynamicPcdList = None    # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
        self._NonDynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
        self._NonDynamicPcdDict = {}

        # Tool/build-option caches, filled on first property access.
        self._ToolDefinitions = None
        self._ToolDefFile = None          # toolcode : tool path
        self._ToolChainFamily = None
        self._BuildRuleFamily = None
        self._BuildOption = None          # toolcode : option
        self._EdkBuildOption = None       # edktoolcode : option
        self._EdkIIBuildOption = None     # edkiitoolcode : option
        self._PackageList = None
        self._ModuleAutoGenList = None
        self._LibraryAutoGenList = None
        self._BuildCommand = None
        self._AsBuildInfList = []
        self._AsBuildModuleList = []

        self.VariableInfo = None

        # When an FDF was parsed, collect the as-built (binary) INFs it lists
        # that are supported for this arch.
        if GlobalData.gFdfParser is not None:
            self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList
            for Inf in self._AsBuildInfList:
                InfClass = PathClass(NormPath(Inf), GlobalData.gWorkspace, self.Arch)
                M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
                if not M.IsSupportedArch:
                    continue
                self._AsBuildModuleList.append(InfClass)
        # get library/modules for build
        self.LibraryBuildDirectoryList = []
        self.ModuleBuildDirectoryList = []

        return True
1102 | \r | |
1103 | def __repr__(self):\r | |
1104 | return "%s [%s]" % (self.MetaFile, self.Arch)\r | |
1105 | \r | |
1106 | ## Create autogen code for platform and modules\r | |
1107 | #\r | |
1108 | # Since there's no autogen code for platform, this method will do nothing\r | |
1109 | # if CreateModuleCodeFile is set to False.\r | |
1110 | #\r | |
1111 | # @param CreateModuleCodeFile Flag indicating if creating module's\r | |
1112 | # autogen code file or not\r | |
1113 | #\r | |
1114 | def CreateCodeFile(self, CreateModuleCodeFile=False):\r | |
1115 | # only module has code to be greated, so do nothing if CreateModuleCodeFile is False\r | |
1116 | if self.IsCodeFileCreated or not CreateModuleCodeFile:\r | |
1117 | return\r | |
1118 | \r | |
1119 | for Ma in self.ModuleAutoGenList:\r | |
1120 | Ma.CreateCodeFile(True)\r | |
1121 | \r | |
1122 | # don't do this twice\r | |
1123 | self.IsCodeFileCreated = True\r | |
1124 | \r | |
1125 | ## Generate Fds Command\r | |
1126 | def _GenFdsCommand(self):\r | |
1127 | return self.Workspace.GenFdsCommand\r | |
1128 | \r | |
1129 | ## Create makefile for the platform and mdoules in it\r | |
1130 | #\r | |
1131 | # @param CreateModuleMakeFile Flag indicating if the makefile for\r | |
1132 | # modules will be created as well\r | |
1133 | #\r | |
1134 | def CreateMakeFile(self, CreateModuleMakeFile=False, FfsCommand = {}):\r | |
1135 | if CreateModuleMakeFile:\r | |
1136 | for ModuleFile in self.Platform.Modules:\r | |
1137 | Ma = ModuleAutoGen(self.Workspace, ModuleFile, self.BuildTarget,\r | |
1138 | self.ToolChain, self.Arch, self.MetaFile)\r | |
1139 | if (ModuleFile.File, self.Arch) in FfsCommand:\r | |
1140 | Ma.CreateMakeFile(True, FfsCommand[ModuleFile.File, self.Arch])\r | |
1141 | else:\r | |
1142 | Ma.CreateMakeFile(True)\r | |
1143 | #Ma.CreateAsBuiltInf()\r | |
1144 | \r | |
1145 | # no need to create makefile for the platform more than once\r | |
1146 | if self.IsMakeFileCreated:\r | |
1147 | return\r | |
1148 | \r | |
1149 | # create library/module build dirs for platform\r | |
1150 | Makefile = GenMake.PlatformMakefile(self)\r | |
1151 | self.LibraryBuildDirectoryList = Makefile.GetLibraryBuildDirectoryList()\r | |
1152 | self.ModuleBuildDirectoryList = Makefile.GetModuleBuildDirectoryList()\r | |
1153 | \r | |
1154 | self.IsMakeFileCreated = True\r | |
1155 | \r | |
    ## Deal with Shared FixedAtBuild Pcds
    #
    # For every library, determine which FixedAtBuild PCDs have the same value
    # in all referencing modules and record them in LibAuto.ConstPcd so they
    # can be treated as constants during code generation.
    #
    def CollectFixedAtBuildPcds(self):
        for LibAuto in self.LibraryAutoGenList:
            FixedAtBuildPcds = {}                # "Guid.Name" -> first value seen
            ShareFixedAtBuildPcdsSameValue = {}  # "Guid.Name" -> True if all values agree
            for Module in LibAuto.ReferenceModules:
                for Pcd in set(Module.FixedAtBuildPcds + LibAuto.FixedAtBuildPcds):
                    DefaultValue = Pcd.DefaultValue
                    # Cover the case: DSC component override the Pcd value and the Pcd only used in one Lib
                    if Pcd in Module.LibraryPcdList:
                        Index = Module.LibraryPcdList.index(Pcd)
                        DefaultValue = Module.LibraryPcdList[Index].DefaultValue
                    key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
                    if key not in FixedAtBuildPcds:
                        # First sighting: provisionally shared with one value.
                        ShareFixedAtBuildPcdsSameValue[key] = True
                        FixedAtBuildPcds[key] = DefaultValue
                    else:
                        if FixedAtBuildPcds[key] != DefaultValue:
                            # A referencing module disagrees; not constant.
                            ShareFixedAtBuildPcdsSameValue[key] = False
            for Pcd in LibAuto.FixedAtBuildPcds:
                key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
                    continue
                else:
                    # Only promote PCDs the DSC itself keeps as FixedAtBuild.
                    DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)]
                    if DscPcd.Type != TAB_PCDS_FIXED_AT_BUILD:
                        continue
                if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
                    LibAuto.ConstPcd[key] = FixedAtBuildPcds[key]
1186 | \r | |
    def CollectVariables(self, DynamicPcdSet):
        """Build a VariableMgr describing the HII variables behind dynamic PCDs.

        @param DynamicPcdSet  iterable of dynamic PCD objects to scan
        @retval VariableMgr   populated with one var_info entry per
                              (PCD, SKU, default store) combination
        """
        # Locate the VPD region in the current FD (identified by the VPD tool
        # GUID) so the variable manager knows its base and size; both stay 0
        # when no FDF is in use.
        VpdRegionSize = 0
        VpdRegionBase = 0
        if self.Workspace.FdfFile:
            FdDict = self.Workspace.FdfProfile.FdDict[GlobalData.gFdfParser.CurrentFdName]
            for FdRegion in FdDict.RegionList:
                for item in FdRegion.RegionDataList:
                    if self.Platform.VpdToolGuid.strip() and self.Platform.VpdToolGuid in item:
                        VpdRegionSize = FdRegion.Size
                        VpdRegionBase = FdRegion.Offset
                        break


        VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(), self.DscBuildDataObj._GetSkuIds())
        VariableInfo.SetVpdRegionMaxSize(VpdRegionSize)
        VariableInfo.SetVpdRegionOffset(VpdRegionBase)
        Index = 0
        for Pcd in DynamicPcdSet:
            pcdname = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
            for SkuName in Pcd.SkuInfoList:
                Sku = Pcd.SkuInfoList[SkuName]
                SkuId = Sku.SkuId
                if SkuId is None or SkuId == '':
                    continue
                # Only HII (variable-backed) SKUs carry a VariableName.
                if len(Sku.VariableName) > 0:
                    VariableGuidStructure = Sku.VariableGuidValue
                    VariableGuid = GuidStructureStringToGuidString(VariableGuidStructure)
                    for StorageName in Sku.DefaultStoreDict:
                        VariableInfo.append_variable(var_info(Index, pcdname, StorageName, SkuName, StringToArray(Sku.VariableName), VariableGuid, Sku.VariableOffset, Sku.VariableAttribute, Sku.HiiDefaultValue, Sku.DefaultStoreDict[StorageName], Pcd.DatumType))
            Index += 1
        return VariableInfo
1219 | \r | |
1220 | def UpdateNVStoreMaxSize(self, OrgVpdFile):\r | |
1221 | if self.VariableInfo:\r | |
1222 | VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)\r | |
1223 | PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]\r | |
1224 | \r | |
1225 | if PcdNvStoreDfBuffer:\r | |
1226 | if os.path.exists(VpdMapFilePath):\r | |
1227 | OrgVpdFile.Read(VpdMapFilePath)\r | |
1228 | PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])\r | |
1229 | NvStoreOffset = PcdItems.values()[0].strip() if PcdItems else '0'\r | |
1230 | else:\r | |
1231 | EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)\r | |
1232 | \r | |
1233 | NvStoreOffset = int(NvStoreOffset, 16) if NvStoreOffset.upper().startswith("0X") else int(NvStoreOffset)\r | |
1234 | default_skuobj = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)\r | |
1235 | maxsize = self.VariableInfo.VpdRegionSize - NvStoreOffset if self.VariableInfo.VpdRegionSize else len(default_skuobj.DefaultValue.split(","))\r | |
1236 | var_data = self.VariableInfo.PatchNVStoreDefaultMaxSize(maxsize)\r | |
1237 | \r | |
1238 | if var_data and default_skuobj:\r | |
1239 | default_skuobj.DefaultValue = var_data\r | |
1240 | PcdNvStoreDfBuffer[0].DefaultValue = var_data\r | |
1241 | PcdNvStoreDfBuffer[0].SkuInfoList.clear()\r | |
1242 | PcdNvStoreDfBuffer[0].SkuInfoList[TAB_DEFAULT] = default_skuobj\r | |
1243 | PcdNvStoreDfBuffer[0].MaxDatumSize = str(len(default_skuobj.DefaultValue.split(",")))\r | |
1244 | \r | |
1245 | return OrgVpdFile\r | |
1246 | \r | |
1247 | ## Collect dynamic PCDs\r | |
1248 | #\r | |
1249 | # Gather dynamic PCDs list from each module and their settings from platform\r | |
1250 | # This interface should be invoked explicitly when platform action is created.\r | |
1251 | #\r | |
1252 | def CollectPlatformDynamicPcds(self):\r | |
1253 | \r | |
1254 | for key in self.Platform.Pcds:\r | |
1255 | for SinglePcd in GlobalData.MixedPcd:\r | |
1256 | if (self.Platform.Pcds[key].TokenCName, self.Platform.Pcds[key].TokenSpaceGuidCName) == SinglePcd:\r | |
1257 | for item in GlobalData.MixedPcd[SinglePcd]:\r | |
1258 | Pcd_Type = item[0].split('_')[-1]\r | |
1259 | if (Pcd_Type == self.Platform.Pcds[key].Type) or (Pcd_Type == TAB_PCDS_DYNAMIC_EX and self.Platform.Pcds[key].Type in PCD_DYNAMIC_EX_TYPE_SET) or \\r | |
1260 | (Pcd_Type == TAB_PCDS_DYNAMIC and self.Platform.Pcds[key].Type in PCD_DYNAMIC_TYPE_SET):\r | |
1261 | Value = self.Platform.Pcds[key]\r | |
1262 | Value.TokenCName = self.Platform.Pcds[key].TokenCName + '_' + Pcd_Type\r | |
1263 | if len(key) == 2:\r | |
1264 | newkey = (Value.TokenCName, key[1])\r | |
1265 | elif len(key) == 3:\r | |
1266 | newkey = (Value.TokenCName, key[1], key[2])\r | |
1267 | del self.Platform.Pcds[key]\r | |
1268 | self.Platform.Pcds[newkey] = Value\r | |
1269 | break\r | |
1270 | break\r | |
1271 | \r | |
1272 | # for gathering error information\r | |
1273 | NoDatumTypePcdList = set()\r | |
1274 | FdfModuleList = []\r | |
1275 | for InfName in self._AsBuildInfList:\r | |
1276 | InfName = mws.join(self.WorkspaceDir, InfName)\r | |
1277 | FdfModuleList.append(os.path.normpath(InfName))\r | |
1278 | for F in self.Platform.Modules.keys():\r | |
1279 | M = ModuleAutoGen(self.Workspace, F, self.BuildTarget, self.ToolChain, self.Arch, self.MetaFile)\r | |
1280 | #GuidValue.update(M.Guids)\r | |
1281 | \r | |
1282 | self.Platform.Modules[F].M = M\r | |
1283 | \r | |
1284 | for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:\r | |
1285 | # make sure that the "VOID*" kind of datum has MaxDatumSize set\r | |
1286 | if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:\r | |
1287 | NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, F))\r | |
1288 | \r | |
1289 | # Check the PCD from Binary INF or Source INF\r | |
1290 | if M.IsBinaryModule == True:\r | |
1291 | PcdFromModule.IsFromBinaryInf = True\r | |
1292 | \r | |
1293 | # Check the PCD from DSC or not\r | |
1294 | PcdFromModule.IsFromDsc = (PcdFromModule.TokenCName, PcdFromModule.TokenSpaceGuidCName) in self.Platform.Pcds\r | |
1295 | \r | |
1296 | if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET or PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1297 | if F.Path not in FdfModuleList:\r | |
1298 | # If one of the Source built modules listed in the DSC is not listed\r | |
1299 | # in FDF modules, and the INF lists a PCD can only use the PcdsDynamic\r | |
1300 | # access method (it is only listed in the DEC file that declares the\r | |
1301 | # PCD as PcdsDynamic), then build tool will report warning message\r | |
1302 | # notify the PI that they are attempting to build a module that must\r | |
1303 | # be included in a flash image in order to be functional. These Dynamic\r | |
1304 | # PCD will not be added into the Database unless it is used by other\r | |
1305 | # modules that are included in the FDF file.\r | |
1306 | if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET and \\r | |
1307 | PcdFromModule.IsFromBinaryInf == False:\r | |
1308 | # Print warning message to let the developer make a determine.\r | |
1309 | continue\r | |
1310 | # If one of the Source built modules listed in the DSC is not listed in\r | |
1311 | # FDF modules, and the INF lists a PCD can only use the PcdsDynamicEx\r | |
1312 | # access method (it is only listed in the DEC file that declares the\r | |
1313 | # PCD as PcdsDynamicEx), then DO NOT break the build; DO NOT add the\r | |
1314 | # PCD to the Platform's PCD Database.\r | |
1315 | if PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1316 | continue\r | |
1317 | #\r | |
1318 | # If a dynamic PCD used by a PEM module/PEI module & DXE module,\r | |
1319 | # it should be stored in Pcd PEI database, If a dynamic only\r | |
1320 | # used by DXE module, it should be stored in DXE PCD database.\r | |
1321 | # The default Phase is DXE\r | |
1322 | #\r | |
1323 | if M.ModuleType in SUP_MODULE_SET_PEI:\r | |
1324 | PcdFromModule.Phase = "PEI"\r | |
1325 | if PcdFromModule not in self._DynaPcdList_:\r | |
1326 | self._DynaPcdList_.append(PcdFromModule)\r | |
1327 | elif PcdFromModule.Phase == 'PEI':\r | |
1328 | # overwrite any the same PCD existing, if Phase is PEI\r | |
1329 | Index = self._DynaPcdList_.index(PcdFromModule)\r | |
1330 | self._DynaPcdList_[Index] = PcdFromModule\r | |
1331 | elif PcdFromModule not in self._NonDynaPcdList_:\r | |
1332 | self._NonDynaPcdList_.append(PcdFromModule)\r | |
1333 | elif PcdFromModule in self._NonDynaPcdList_ and PcdFromModule.IsFromBinaryInf == True:\r | |
1334 | Index = self._NonDynaPcdList_.index(PcdFromModule)\r | |
1335 | if self._NonDynaPcdList_[Index].IsFromBinaryInf == False:\r | |
1336 | #The PCD from Binary INF will override the same one from source INF\r | |
1337 | self._NonDynaPcdList_.remove (self._NonDynaPcdList_[Index])\r | |
1338 | PcdFromModule.Pending = False\r | |
1339 | self._NonDynaPcdList_.append (PcdFromModule)\r | |
1340 | DscModuleSet = {os.path.normpath(ModuleInf.Path) for ModuleInf in self.Platform.Modules}\r | |
1341 | # add the PCD from modules that listed in FDF but not in DSC to Database\r | |
1342 | for InfName in FdfModuleList:\r | |
1343 | if InfName not in DscModuleSet:\r | |
1344 | InfClass = PathClass(InfName)\r | |
1345 | M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]\r | |
1346 | # If a module INF in FDF but not in current arch's DSC module list, it must be module (either binary or source)\r | |
1347 | # for different Arch. PCDs in source module for different Arch is already added before, so skip the source module here.\r | |
1348 | # For binary module, if in current arch, we need to list the PCDs into database.\r | |
1349 | if not M.IsSupportedArch:\r | |
1350 | continue\r | |
1351 | # Override the module PCD setting by platform setting\r | |
1352 | ModulePcdList = self.ApplyPcdSetting(M, M.Pcds)\r | |
1353 | for PcdFromModule in ModulePcdList:\r | |
1354 | PcdFromModule.IsFromBinaryInf = True\r | |
1355 | PcdFromModule.IsFromDsc = False\r | |
1356 | # Only allow the DynamicEx and Patchable PCD in AsBuild INF\r | |
1357 | if PcdFromModule.Type not in PCD_DYNAMIC_EX_TYPE_SET and PcdFromModule.Type not in TAB_PCDS_PATCHABLE_IN_MODULE:\r | |
1358 | EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",\r | |
1359 | File=self.MetaFile,\r | |
1360 | ExtraData="\n\tExisted %s PCD %s in:\n\t\t%s\n"\r | |
1361 | % (PcdFromModule.Type, PcdFromModule.TokenCName, InfName))\r | |
1362 | # make sure that the "VOID*" kind of datum has MaxDatumSize set\r | |
1363 | if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:\r | |
1364 | NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, InfName))\r | |
1365 | if M.ModuleType in SUP_MODULE_SET_PEI:\r | |
1366 | PcdFromModule.Phase = "PEI"\r | |
1367 | if PcdFromModule not in self._DynaPcdList_ and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1368 | self._DynaPcdList_.append(PcdFromModule)\r | |
1369 | elif PcdFromModule not in self._NonDynaPcdList_ and PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE:\r | |
1370 | self._NonDynaPcdList_.append(PcdFromModule)\r | |
1371 | if PcdFromModule in self._DynaPcdList_ and PcdFromModule.Phase == 'PEI' and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1372 | # Overwrite the phase of any the same PCD existing, if Phase is PEI.\r | |
1373 | # It is to solve the case that a dynamic PCD used by a PEM module/PEI\r | |
1374 | # module & DXE module at a same time.\r | |
1375 | # Overwrite the type of the PCDs in source INF by the type of AsBuild\r | |
1376 | # INF file as DynamicEx.\r | |
1377 | Index = self._DynaPcdList_.index(PcdFromModule)\r | |
1378 | self._DynaPcdList_[Index].Phase = PcdFromModule.Phase\r | |
1379 | self._DynaPcdList_[Index].Type = PcdFromModule.Type\r | |
1380 | for PcdFromModule in self._NonDynaPcdList_:\r | |
1381 | # If a PCD is not listed in the DSC file, but binary INF files used by\r | |
1382 | # this platform all (that use this PCD) list the PCD in a [PatchPcds]\r | |
1383 | # section, AND all source INF files used by this platform the build\r | |
1384 | # that use the PCD list the PCD in either a [Pcds] or [PatchPcds]\r | |
1385 | # section, then the tools must NOT add the PCD to the Platform's PCD\r | |
1386 | # Database; the build must assign the access method for this PCD as\r | |
1387 | # PcdsPatchableInModule.\r | |
1388 | if PcdFromModule not in self._DynaPcdList_:\r | |
1389 | continue\r | |
1390 | Index = self._DynaPcdList_.index(PcdFromModule)\r | |
1391 | if PcdFromModule.IsFromDsc == False and \\r | |
1392 | PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE and \\r | |
1393 | PcdFromModule.IsFromBinaryInf == True and \\r | |
1394 | self._DynaPcdList_[Index].IsFromBinaryInf == False:\r | |
1395 | Index = self._DynaPcdList_.index(PcdFromModule)\r | |
1396 | self._DynaPcdList_.remove (self._DynaPcdList_[Index])\r | |
1397 | \r | |
1398 | # print out error information and break the build, if error found\r | |
1399 | if len(NoDatumTypePcdList) > 0:\r | |
1400 | NoDatumTypePcdListString = "\n\t\t".join(NoDatumTypePcdList)\r | |
1401 | EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",\r | |
1402 | File=self.MetaFile,\r | |
1403 | ExtraData="\n\tPCD(s) without MaxDatumSize:\n\t\t%s\n"\r | |
1404 | % NoDatumTypePcdListString)\r | |
1405 | self._NonDynamicPcdList = self._NonDynaPcdList_\r | |
1406 | self._DynamicPcdList = self._DynaPcdList_\r | |
1407 | #\r | |
1408 | # Sort dynamic PCD list to:\r | |
1409 | # 1) If PCD's datum type is VOID* and value is unicode string which starts with L, the PCD item should\r | |
1410 | # try to be put header of dynamicd List\r | |
1411 | # 2) If PCD is HII type, the PCD item should be put after unicode type PCD\r | |
1412 | #\r | |
1413 | # The reason of sorting is make sure the unicode string is in double-byte alignment in string table.\r | |
1414 | #\r | |
1415 | UnicodePcdArray = set()\r | |
1416 | HiiPcdArray = set()\r | |
1417 | OtherPcdArray = set()\r | |
1418 | VpdPcdDict = {}\r | |
1419 | VpdFile = VpdInfoFile.VpdInfoFile()\r | |
1420 | NeedProcessVpdMapFile = False\r | |
1421 | \r | |
1422 | for pcd in self.Platform.Pcds:\r | |
1423 | if pcd not in self._PlatformPcds:\r | |
1424 | self._PlatformPcds[pcd] = self.Platform.Pcds[pcd]\r | |
1425 | \r | |
1426 | for item in self._PlatformPcds:\r | |
1427 | if self._PlatformPcds[item].DatumType and self._PlatformPcds[item].DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r | |
1428 | self._PlatformPcds[item].DatumType = TAB_VOID\r | |
1429 | \r | |
1430 | if (self.Workspace.ArchList[-1] == self.Arch):\r | |
1431 | for Pcd in self._DynamicPcdList:\r | |
1432 | # just pick the a value to determine whether is unicode string type\r | |
1433 | Sku = Pcd.SkuInfoList.values()[0]\r | |
1434 | Sku.VpdOffset = Sku.VpdOffset.strip()\r | |
1435 | \r | |
1436 | if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r | |
1437 | Pcd.DatumType = TAB_VOID\r | |
1438 | \r | |
1439 | # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex\r | |
1440 | # if found HII type PCD then insert to right of UnicodeIndex\r | |
1441 | if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:\r | |
1442 | VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd\r | |
1443 | \r | |
1444 | #Collect DynamicHii PCD values and assign it to DynamicExVpd PCD gEfiMdeModulePkgTokenSpaceGuid.PcdNvStoreDefaultValueBuffer\r | |
1445 | PcdNvStoreDfBuffer = VpdPcdDict.get(("PcdNvStoreDefaultValueBuffer", "gEfiMdeModulePkgTokenSpaceGuid"))\r | |
1446 | if PcdNvStoreDfBuffer:\r | |
1447 | self.VariableInfo = self.CollectVariables(self._DynamicPcdList)\r | |
1448 | vardump = self.VariableInfo.dump()\r | |
1449 | if vardump:\r | |
1450 | PcdNvStoreDfBuffer.DefaultValue = vardump\r | |
1451 | for skuname in PcdNvStoreDfBuffer.SkuInfoList:\r | |
1452 | PcdNvStoreDfBuffer.SkuInfoList[skuname].DefaultValue = vardump\r | |
1453 | PcdNvStoreDfBuffer.MaxDatumSize = str(len(vardump.split(",")))\r | |
1454 | \r | |
1455 | PlatformPcds = sorted(self._PlatformPcds.keys())\r | |
1456 | #\r | |
1457 | # Add VPD type PCD into VpdFile and determine whether the VPD PCD need to be fixed up.\r | |
1458 | #\r | |
1459 | VpdSkuMap = {}\r | |
1460 | for PcdKey in PlatformPcds:\r | |
1461 | Pcd = self._PlatformPcds[PcdKey]\r | |
1462 | if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD] and \\r | |
1463 | PcdKey in VpdPcdDict:\r | |
1464 | Pcd = VpdPcdDict[PcdKey]\r | |
1465 | SkuValueMap = {}\r | |
1466 | DefaultSku = Pcd.SkuInfoList.get(TAB_DEFAULT)\r | |
1467 | if DefaultSku:\r | |
1468 | PcdValue = DefaultSku.DefaultValue\r | |
1469 | if PcdValue not in SkuValueMap:\r | |
1470 | SkuValueMap[PcdValue] = []\r | |
1471 | VpdFile.Add(Pcd, TAB_DEFAULT, DefaultSku.VpdOffset)\r | |
1472 | SkuValueMap[PcdValue].append(DefaultSku)\r | |
1473 | \r | |
1474 | for (SkuName, Sku) in Pcd.SkuInfoList.items():\r | |
1475 | Sku.VpdOffset = Sku.VpdOffset.strip()\r | |
1476 | PcdValue = Sku.DefaultValue\r | |
1477 | if PcdValue == "":\r | |
1478 | PcdValue = Pcd.DefaultValue\r | |
1479 | if Sku.VpdOffset != '*':\r | |
1480 | if PcdValue.startswith("{"):\r | |
1481 | Alignment = 8\r | |
1482 | elif PcdValue.startswith("L"):\r | |
1483 | Alignment = 2\r | |
1484 | else:\r | |
1485 | Alignment = 1\r | |
1486 | try:\r | |
1487 | VpdOffset = int(Sku.VpdOffset)\r | |
1488 | except:\r | |
1489 | try:\r | |
1490 | VpdOffset = int(Sku.VpdOffset, 16)\r | |
1491 | except:\r | |
1492 | EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))\r | |
1493 | if VpdOffset % Alignment != 0:\r | |
1494 | if PcdValue.startswith("{"):\r | |
1495 | EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName), File=self.MetaFile)\r | |
1496 | else:\r | |
1497 | EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Alignment))\r | |
1498 | if PcdValue not in SkuValueMap:\r | |
1499 | SkuValueMap[PcdValue] = []\r | |
1500 | VpdFile.Add(Pcd, SkuName, Sku.VpdOffset)\r | |
1501 | SkuValueMap[PcdValue].append(Sku)\r | |
1502 | # if the offset of a VPD is *, then it need to be fixed up by third party tool.\r | |
1503 | if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":\r | |
1504 | NeedProcessVpdMapFile = True\r | |
1505 | if self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == '':\r | |
1506 | EdkLogger.error("Build", FILE_NOT_FOUND, \\r | |
1507 | "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r | |
1508 | \r | |
1509 | VpdSkuMap[PcdKey] = SkuValueMap\r | |
1510 | #\r | |
1511 | # Fix the PCDs define in VPD PCD section that never referenced by module.\r | |
1512 | # An example is PCD for signature usage.\r | |
1513 | #\r | |
1514 | for DscPcd in PlatformPcds:\r | |
1515 | DscPcdEntry = self._PlatformPcds[DscPcd]\r | |
1516 | if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:\r | |
1517 | if not (self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == ''):\r | |
1518 | FoundFlag = False\r | |
1519 | for VpdPcd in VpdFile._VpdArray:\r | |
1520 | # This PCD has been referenced by module\r | |
1521 | if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \\r | |
1522 | (VpdPcd.TokenCName == DscPcdEntry.TokenCName):\r | |
1523 | FoundFlag = True\r | |
1524 | \r | |
1525 | # Not found, it should be signature\r | |
1526 | if not FoundFlag :\r | |
1527 | # just pick the a value to determine whether is unicode string type\r | |
1528 | SkuValueMap = {}\r | |
1529 | SkuObjList = DscPcdEntry.SkuInfoList.items()\r | |
1530 | DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)\r | |
1531 | if DefaultSku:\r | |
1532 | defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))\r | |
1533 | SkuObjList[0], SkuObjList[defaultindex] = SkuObjList[defaultindex], SkuObjList[0]\r | |
1534 | for (SkuName, Sku) in SkuObjList:\r | |
1535 | Sku.VpdOffset = Sku.VpdOffset.strip()\r | |
1536 | \r | |
1537 | # Need to iterate DEC pcd information to get the value & datumtype\r | |
1538 | for eachDec in self.PackageList:\r | |
1539 | for DecPcd in eachDec.Pcds:\r | |
1540 | DecPcdEntry = eachDec.Pcds[DecPcd]\r | |
1541 | if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \\r | |
1542 | (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):\r | |
1543 | # Print warning message to let the developer make a determine.\r | |
1544 | EdkLogger.warn("build", "Unreferenced vpd pcd used!",\r | |
1545 | File=self.MetaFile, \\r | |
1546 | ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \\r | |
1547 | %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))\r | |
1548 | \r | |
1549 | DscPcdEntry.DatumType = DecPcdEntry.DatumType\r | |
1550 | DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue\r | |
1551 | DscPcdEntry.TokenValue = DecPcdEntry.TokenValue\r | |
1552 | DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]\r | |
1553 | # Only fix the value while no value provided in DSC file.\r | |
1554 | if not Sku.DefaultValue:\r | |
1555 | DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue\r | |
1556 | \r | |
1557 | if DscPcdEntry not in self._DynamicPcdList:\r | |
1558 | self._DynamicPcdList.append(DscPcdEntry)\r | |
1559 | Sku.VpdOffset = Sku.VpdOffset.strip()\r | |
1560 | PcdValue = Sku.DefaultValue\r | |
1561 | if PcdValue == "":\r | |
1562 | PcdValue = DscPcdEntry.DefaultValue\r | |
1563 | if Sku.VpdOffset != '*':\r | |
1564 | if PcdValue.startswith("{"):\r | |
1565 | Alignment = 8\r | |
1566 | elif PcdValue.startswith("L"):\r | |
1567 | Alignment = 2\r | |
1568 | else:\r | |
1569 | Alignment = 1\r | |
1570 | try:\r | |
1571 | VpdOffset = int(Sku.VpdOffset)\r | |
1572 | except:\r | |
1573 | try:\r | |
1574 | VpdOffset = int(Sku.VpdOffset, 16)\r | |
1575 | except:\r | |
1576 | EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName))\r | |
1577 | if VpdOffset % Alignment != 0:\r | |
1578 | if PcdValue.startswith("{"):\r | |
1579 | EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName), File=self.MetaFile)\r | |
1580 | else:\r | |
1581 | EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, Alignment))\r | |
1582 | if PcdValue not in SkuValueMap:\r | |
1583 | SkuValueMap[PcdValue] = []\r | |
1584 | VpdFile.Add(DscPcdEntry, SkuName, Sku.VpdOffset)\r | |
1585 | SkuValueMap[PcdValue].append(Sku)\r | |
1586 | if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":\r | |
1587 | NeedProcessVpdMapFile = True\r | |
1588 | if DscPcdEntry.DatumType == TAB_VOID and PcdValue.startswith("L"):\r | |
1589 | UnicodePcdArray.add(DscPcdEntry)\r | |
1590 | elif len(Sku.VariableName) > 0:\r | |
1591 | HiiPcdArray.add(DscPcdEntry)\r | |
1592 | else:\r | |
1593 | OtherPcdArray.add(DscPcdEntry)\r | |
1594 | \r | |
1595 | # if the offset of a VPD is *, then it need to be fixed up by third party tool.\r | |
1596 | VpdSkuMap[DscPcd] = SkuValueMap\r | |
1597 | if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \\r | |
1598 | VpdFile.GetCount() != 0:\r | |
1599 | EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,\r | |
1600 | "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))\r | |
1601 | \r | |
1602 | if VpdFile.GetCount() != 0:\r | |
1603 | \r | |
1604 | self.FixVpdOffset(VpdFile)\r | |
1605 | \r | |
1606 | self.FixVpdOffset(self.UpdateNVStoreMaxSize(VpdFile))\r | |
1607 | \r | |
1608 | # Process VPD map file generated by third party BPDG tool\r | |
1609 | if NeedProcessVpdMapFile:\r | |
1610 | VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)\r | |
1611 | if os.path.exists(VpdMapFilePath):\r | |
1612 | VpdFile.Read(VpdMapFilePath)\r | |
1613 | \r | |
1614 | # Fixup "*" offset\r | |
1615 | for pcd in VpdSkuMap:\r | |
1616 | vpdinfo = VpdFile.GetVpdInfo(pcd)\r | |
1617 | if vpdinfo is None:\r | |
1618 | # just pick the a value to determine whether is unicode string type\r | |
1619 | continue\r | |
1620 | for pcdvalue in VpdSkuMap[pcd]:\r | |
1621 | for sku in VpdSkuMap[pcd][pcdvalue]:\r | |
1622 | for item in vpdinfo:\r | |
1623 | if item[2] == pcdvalue:\r | |
1624 | sku.VpdOffset = item[1]\r | |
1625 | else:\r | |
1626 | EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)\r | |
1627 | \r | |
1628 | # Delete the DynamicPcdList At the last time enter into this function\r | |
1629 | for Pcd in self._DynamicPcdList:\r | |
1630 | # just pick the a value to determine whether is unicode string type\r | |
1631 | Sku = Pcd.SkuInfoList.values()[0]\r | |
1632 | Sku.VpdOffset = Sku.VpdOffset.strip()\r | |
1633 | \r | |
1634 | if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r | |
1635 | Pcd.DatumType = TAB_VOID\r | |
1636 | \r | |
1637 | PcdValue = Sku.DefaultValue\r | |
1638 | if Pcd.DatumType == TAB_VOID and PcdValue.startswith("L"):\r | |
1639 | # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex\r | |
1640 | UnicodePcdArray.add(Pcd)\r | |
1641 | elif len(Sku.VariableName) > 0:\r | |
1642 | # if found HII type PCD then insert to right of UnicodeIndex\r | |
1643 | HiiPcdArray.add(Pcd)\r | |
1644 | else:\r | |
1645 | OtherPcdArray.add(Pcd)\r | |
1646 | del self._DynamicPcdList[:]\r | |
1647 | self._DynamicPcdList.extend(list(UnicodePcdArray))\r | |
1648 | self._DynamicPcdList.extend(list(HiiPcdArray))\r | |
1649 | self._DynamicPcdList.extend(list(OtherPcdArray))\r | |
1650 | allskuset = [(SkuName, Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName, Sku) in pcd.SkuInfoList.items()]\r | |
1651 | for pcd in self._DynamicPcdList:\r | |
1652 | if len(pcd.SkuInfoList) == 1:\r | |
1653 | for (SkuName, SkuId) in allskuset:\r | |
1654 | if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0:\r | |
1655 | continue\r | |
1656 | pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])\r | |
1657 | pcd.SkuInfoList[SkuName].SkuId = SkuId\r | |
1658 | self.AllPcdList = self._NonDynamicPcdList + self._DynamicPcdList\r | |
1659 | \r | |
1660 | def FixVpdOffset(self, VpdFile ):\r | |
1661 | FvPath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY)\r | |
1662 | if not os.path.exists(FvPath):\r | |
1663 | try:\r | |
1664 | os.makedirs(FvPath)\r | |
1665 | except:\r | |
1666 | EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)\r | |
1667 | \r | |
1668 | VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)\r | |
1669 | \r | |
1670 | if VpdFile.Write(VpdFilePath):\r | |
1671 | # retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.\r | |
1672 | BPDGToolName = None\r | |
1673 | for ToolDef in self.ToolDefinition.values():\r | |
1674 | if TAB_GUID in ToolDef and ToolDef[TAB_GUID] == self.Platform.VpdToolGuid:\r | |
1675 | if "PATH" not in ToolDef:\r | |
1676 | EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)\r | |
1677 | BPDGToolName = ToolDef["PATH"]\r | |
1678 | break\r | |
1679 | # Call third party GUID BPDG tool.\r | |
1680 | if BPDGToolName is not None:\r | |
1681 | VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)\r | |
1682 | else:\r | |
1683 | EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r | |
1684 | \r | |
1685 | ## Return the platform build data object\r | |
1686 | def _GetPlatform(self):\r | |
1687 | if self._Platform is None:\r | |
1688 | self._Platform = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r | |
1689 | return self._Platform\r | |
1690 | \r | |
1691 | ## Return platform name\r | |
1692 | def _GetName(self):\r | |
1693 | return self.Platform.PlatformName\r | |
1694 | \r | |
1695 | ## Return the meta file GUID\r | |
1696 | def _GetGuid(self):\r | |
1697 | return self.Platform.Guid\r | |
1698 | \r | |
1699 | ## Return the platform version\r | |
1700 | def _GetVersion(self):\r | |
1701 | return self.Platform.Version\r | |
1702 | \r | |
1703 | ## Return the FDF file name\r | |
1704 | def _GetFdfFile(self):\r | |
1705 | if self._FdfFile is None:\r | |
1706 | if self.Workspace.FdfFile != "":\r | |
1707 | self._FdfFile= mws.join(self.WorkspaceDir, self.Workspace.FdfFile)\r | |
1708 | else:\r | |
1709 | self._FdfFile = ''\r | |
1710 | return self._FdfFile\r | |
1711 | \r | |
1712 | ## Return the build output directory platform specifies\r | |
1713 | def _GetOutputDir(self):\r | |
1714 | return self.Platform.OutputDirectory\r | |
1715 | \r | |
1716 | ## Return the directory to store all intermediate and final files built\r | |
1717 | def _GetBuildDir(self):\r | |
1718 | if self._BuildDir is None:\r | |
1719 | if os.path.isabs(self.OutputDir):\r | |
1720 | self._BuildDir = path.join(\r | |
1721 | path.abspath(self.OutputDir),\r | |
1722 | self.BuildTarget + "_" + self.ToolChain,\r | |
1723 | )\r | |
1724 | else:\r | |
1725 | self._BuildDir = path.join(\r | |
1726 | self.WorkspaceDir,\r | |
1727 | self.OutputDir,\r | |
1728 | self.BuildTarget + "_" + self.ToolChain,\r | |
1729 | )\r | |
1730 | GlobalData.gBuildDirectory = self._BuildDir\r | |
1731 | return self._BuildDir\r | |
1732 | \r | |
1733 | ## Return directory of platform makefile\r | |
1734 | #\r | |
1735 | # @retval string Makefile directory\r | |
1736 | #\r | |
1737 | def _GetMakeFileDir(self):\r | |
1738 | if self._MakeFileDir is None:\r | |
1739 | self._MakeFileDir = path.join(self.BuildDir, self.Arch)\r | |
1740 | return self._MakeFileDir\r | |
1741 | \r | |
1742 | ## Return build command string\r | |
1743 | #\r | |
1744 | # @retval string Build command string\r | |
1745 | #\r | |
1746 | def _GetBuildCommand(self):\r | |
1747 | if self._BuildCommand is None:\r | |
1748 | self._BuildCommand = []\r | |
1749 | if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:\r | |
1750 | self._BuildCommand += SplitOption(self.ToolDefinition["MAKE"]["PATH"])\r | |
1751 | if "FLAGS" in self.ToolDefinition["MAKE"]:\r | |
1752 | NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip()\r | |
1753 | if NewOption != '':\r | |
1754 | self._BuildCommand += SplitOption(NewOption)\r | |
1755 | if "MAKE" in self.EdkIIBuildOption:\r | |
1756 | if "FLAGS" in self.EdkIIBuildOption["MAKE"]:\r | |
1757 | Flags = self.EdkIIBuildOption["MAKE"]["FLAGS"]\r | |
1758 | if Flags.startswith('='):\r | |
1759 | self._BuildCommand = [self._BuildCommand[0]] + [Flags[1:]]\r | |
1760 | else:\r | |
1761 | self._BuildCommand.append(Flags)\r | |
1762 | return self._BuildCommand\r | |
1763 | \r | |
    ## Get tool chain definition
    #
    # Get each tool defition for given tool chain from tools_def.txt and platform
    #
    # Builds a {Tool: {Attribute: Value}} dictionary for the current
    # Target/ToolChain/Arch, applies platform BuildOption overrides, writes the
    # result to self.ToolDefinitionFile and exports DLL paths / MAKE_FLAGS into
    # the process environment.  The dictionary is computed once and cached.
    #
    def _GetToolDefinition(self):
        if self._ToolDefinitions is None:
            ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary
            if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:
                EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",
                                ExtraData="[%s]" % self.MetaFile)
            self._ToolDefinitions = {}
            DllPathList = set()
            # Keys in tools_def.txt look like TARGET_TOOLCHAIN_ARCH_TOOL_ATTR;
            # keep only entries matching this AutoGen's build context.
            for Def in ToolDefinition:
                Target, Tag, Arch, Tool, Attr = Def.split("_")
                if Target != self.BuildTarget or Tag != self.ToolChain or Arch != self.Arch:
                    continue

                Value = ToolDefinition[Def]
                # don't record the DLL
                if Attr == "DLL":
                    DllPathList.add(Value)
                    continue

                if Tool not in self._ToolDefinitions:
                    self._ToolDefinitions[Tool] = {}
                self._ToolDefinitions[Tool][Attr] = Value

            ToolsDef = ''
            # Silent build: force make's -s flag so it suppresses command echo.
            if GlobalData.gOptions.SilentMode and "MAKE" in self._ToolDefinitions:
                if "FLAGS" not in self._ToolDefinitions["MAKE"]:
                    self._ToolDefinitions["MAKE"]["FLAGS"] = ""
                self._ToolDefinitions["MAKE"]["FLAGS"] += " -s"
            MakeFlags = ''
            for Tool in self._ToolDefinitions:
                for Attr in self._ToolDefinitions[Tool]:
                    Value = self._ToolDefinitions[Tool][Attr]
                    if Tool in self.BuildOption and Attr in self.BuildOption[Tool]:
                        # check if override is indicated
                        if self.BuildOption[Tool][Attr].startswith('='):
                            # '=' prefix: platform option replaces the tools_def value outright
                            Value = self.BuildOption[Tool][Attr][1:]
                        else:
                            # otherwise append (PATH is always replaced, never concatenated)
                            if Attr != 'PATH':
                                Value += " " + self.BuildOption[Tool][Attr]
                            else:
                                Value = self.BuildOption[Tool][Attr]

                    if Attr == "PATH":
                        # Don't put MAKE definition in the file
                        if Tool != "MAKE":
                            ToolsDef += "%s = %s\n" % (Tool, Value)
                    elif Attr != "DLL":
                        # Don't put MAKE definition in the file
                        if Tool == "MAKE":
                            if Attr == "FLAGS":
                                MakeFlags = Value
                        else:
                            ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)
                ToolsDef += "\n"

            # Persist the per-arch tool definitions (only rewritten when changed).
            SaveFileOnChange(self.ToolDefinitionFile, ToolsDef)
            # Prepend tool DLL directories to PATH so the tools can load them.
            for DllPath in DllPathList:
                os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]
            os.environ["MAKE_FLAGS"] = MakeFlags

        return self._ToolDefinitions
1829 | \r | |
1830 | ## Return the paths of tools\r | |
1831 | def _GetToolDefFile(self):\r | |
1832 | if self._ToolDefFile is None:\r | |
1833 | self._ToolDefFile = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)\r | |
1834 | return self._ToolDefFile\r | |
1835 | \r | |
1836 | ## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'.\r | |
1837 | def _GetToolChainFamily(self):\r | |
1838 | if self._ToolChainFamily is None:\r | |
1839 | ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase\r | |
1840 | if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \\r | |
1841 | or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \\r | |
1842 | or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]:\r | |
1843 | EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \\r | |
1844 | % self.ToolChain)\r | |
1845 | self._ToolChainFamily = TAB_COMPILER_MSFT\r | |
1846 | else:\r | |
1847 | self._ToolChainFamily = ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]\r | |
1848 | return self._ToolChainFamily\r | |
1849 | \r | |
1850 | def _GetBuildRuleFamily(self):\r | |
1851 | if self._BuildRuleFamily is None:\r | |
1852 | ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase\r | |
1853 | if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \\r | |
1854 | or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \\r | |
1855 | or not ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]:\r | |
1856 | EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \\r | |
1857 | % self.ToolChain)\r | |
1858 | self._BuildRuleFamily = TAB_COMPILER_MSFT\r | |
1859 | else:\r | |
1860 | self._BuildRuleFamily = ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]\r | |
1861 | return self._BuildRuleFamily\r | |
1862 | \r | |
1863 | ## Return the build options specific for all modules in this platform\r | |
1864 | def _GetBuildOptions(self):\r | |
1865 | if self._BuildOption is None:\r | |
1866 | self._BuildOption = self._ExpandBuildOption(self.Platform.BuildOptions)\r | |
1867 | return self._BuildOption\r | |
1868 | \r | |
1869 | ## Return the build options specific for EDK modules in this platform\r | |
1870 | def _GetEdkBuildOptions(self):\r | |
1871 | if self._EdkBuildOption is None:\r | |
1872 | self._EdkBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)\r | |
1873 | return self._EdkBuildOption\r | |
1874 | \r | |
1875 | ## Return the build options specific for EDKII modules in this platform\r | |
1876 | def _GetEdkIIBuildOptions(self):\r | |
1877 | if self._EdkIIBuildOption is None:\r | |
1878 | self._EdkIIBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)\r | |
1879 | return self._EdkIIBuildOption\r | |
1880 | \r | |
1881 | ## Parse build_rule.txt in Conf Directory.\r | |
1882 | #\r | |
1883 | # @retval BuildRule object\r | |
1884 | #\r | |
1885 | def _GetBuildRule(self):\r | |
1886 | if self._BuildRule is None:\r | |
1887 | BuildRuleFile = None\r | |
1888 | if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:\r | |
1889 | BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]\r | |
1890 | if not BuildRuleFile:\r | |
1891 | BuildRuleFile = gDefaultBuildRuleFile\r | |
1892 | self._BuildRule = BuildRule(BuildRuleFile)\r | |
1893 | if self._BuildRule._FileVersion == "":\r | |
1894 | self._BuildRule._FileVersion = AutoGenReqBuildRuleVerNum\r | |
1895 | else:\r | |
1896 | if self._BuildRule._FileVersion < AutoGenReqBuildRuleVerNum :\r | |
1897 | # If Build Rule's version is less than the version number required by the tools, halting the build.\r | |
1898 | EdkLogger.error("build", AUTOGEN_ERROR,\r | |
1899 | ExtraData="The version number [%s] of build_rule.txt is less than the version number required by the AutoGen.(the minimum required version number is [%s])"\\r | |
1900 | % (self._BuildRule._FileVersion, AutoGenReqBuildRuleVerNum))\r | |
1901 | \r | |
1902 | return self._BuildRule\r | |
1903 | \r | |
1904 | ## Summarize the packages used by modules in this platform\r | |
1905 | def _GetPackageList(self):\r | |
1906 | if self._PackageList is None:\r | |
1907 | self._PackageList = set()\r | |
1908 | for La in self.LibraryAutoGenList:\r | |
1909 | self._PackageList.update(La.DependentPackageList)\r | |
1910 | for Ma in self.ModuleAutoGenList:\r | |
1911 | self._PackageList.update(Ma.DependentPackageList)\r | |
1912 | #Collect package set information from INF of FDF\r | |
1913 | PkgSet = set()\r | |
1914 | for ModuleFile in self._AsBuildModuleList:\r | |
1915 | if ModuleFile in self.Platform.Modules:\r | |
1916 | continue\r | |
1917 | ModuleData = self.BuildDatabase[ModuleFile, self.Arch, self.BuildTarget, self.ToolChain]\r | |
1918 | PkgSet.update(ModuleData.Packages)\r | |
1919 | self._PackageList = list(self._PackageList) + list (PkgSet)\r | |
1920 | return self._PackageList\r | |
1921 | \r | |
1922 | def _GetNonDynamicPcdDict(self):\r | |
1923 | if self._NonDynamicPcdDict:\r | |
1924 | return self._NonDynamicPcdDict\r | |
1925 | for Pcd in self.NonDynamicPcdList:\r | |
1926 | self._NonDynamicPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd\r | |
1927 | return self._NonDynamicPcdDict\r | |
1928 | \r | |
1929 | ## Get list of non-dynamic PCDs\r | |
1930 | def _GetNonDynamicPcdList(self):\r | |
1931 | if self._NonDynamicPcdList is None:\r | |
1932 | self.CollectPlatformDynamicPcds()\r | |
1933 | return self._NonDynamicPcdList\r | |
1934 | \r | |
1935 | ## Get list of dynamic PCDs\r | |
1936 | def _GetDynamicPcdList(self):\r | |
1937 | if self._DynamicPcdList is None:\r | |
1938 | self.CollectPlatformDynamicPcds()\r | |
1939 | return self._DynamicPcdList\r | |
1940 | \r | |
1941 | ## Generate Token Number for all PCD\r | |
1942 | def _GetPcdTokenNumbers(self):\r | |
1943 | if self._PcdTokenNumber is None:\r | |
1944 | self._PcdTokenNumber = OrderedDict()\r | |
1945 | TokenNumber = 1\r | |
1946 | #\r | |
1947 | # Make the Dynamic and DynamicEx PCD use within different TokenNumber area.\r | |
1948 | # Such as:\r | |
1949 | #\r | |
1950 | # Dynamic PCD:\r | |
1951 | # TokenNumber 0 ~ 10\r | |
1952 | # DynamicEx PCD:\r | |
1953 | # TokeNumber 11 ~ 20\r | |
1954 | #\r | |
1955 | for Pcd in self.DynamicPcdList:\r | |
1956 | if Pcd.Phase == "PEI" and Pcd.Type in PCD_DYNAMIC_TYPE_SET:\r | |
1957 | EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r | |
1958 | self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r | |
1959 | TokenNumber += 1\r | |
1960 | \r | |
1961 | for Pcd in self.DynamicPcdList:\r | |
1962 | if Pcd.Phase == "PEI" and Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1963 | EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r | |
1964 | self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r | |
1965 | TokenNumber += 1\r | |
1966 | \r | |
1967 | for Pcd in self.DynamicPcdList:\r | |
1968 | if Pcd.Phase == "DXE" and Pcd.Type in PCD_DYNAMIC_TYPE_SET:\r | |
1969 | EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r | |
1970 | self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r | |
1971 | TokenNumber += 1\r | |
1972 | \r | |
1973 | for Pcd in self.DynamicPcdList:\r | |
1974 | if Pcd.Phase == "DXE" and Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1975 | EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r | |
1976 | self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r | |
1977 | TokenNumber += 1\r | |
1978 | \r | |
1979 | for Pcd in self.NonDynamicPcdList:\r | |
1980 | self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r | |
1981 | TokenNumber += 1\r | |
1982 | return self._PcdTokenNumber\r | |
1983 | \r | |
    ## Summarize ModuleAutoGen objects of all modules/libraries to be built for this platform
    #
    # Rebuilds both self._ModuleAutoGenList and self._LibraryAutoGenList from
    # scratch, and links every library instance back to the modules that
    # reference it (La.ReferenceModules).
    #
    def _GetAutoGenObjectList(self):
        self._ModuleAutoGenList = []
        self._LibraryAutoGenList = []
        for ModuleFile in self.Platform.Modules:
            Ma = ModuleAutoGen(
                    self.Workspace,
                    ModuleFile,
                    self.BuildTarget,
                    self.ToolChain,
                    self.Arch,
                    self.MetaFile
                    )
            if Ma not in self._ModuleAutoGenList:
                self._ModuleAutoGenList.append(Ma)
            for La in Ma.LibraryAutoGenList:
                # A library instance can be shared by several modules: record it
                # once, and record each consuming module exactly once.
                if La not in self._LibraryAutoGenList:
                    self._LibraryAutoGenList.append(La)
                if Ma not in La.ReferenceModules:
                    La.ReferenceModules.append(Ma)
2004 | \r | |
2005 | ## Summarize ModuleAutoGen objects of all modules to be built for this platform\r | |
2006 | def _GetModuleAutoGenList(self):\r | |
2007 | if self._ModuleAutoGenList is None:\r | |
2008 | self._GetAutoGenObjectList()\r | |
2009 | return self._ModuleAutoGenList\r | |
2010 | \r | |
2011 | ## Summarize ModuleAutoGen objects of all libraries to be built for this platform\r | |
2012 | def _GetLibraryAutoGenList(self):\r | |
2013 | if self._LibraryAutoGenList is None:\r | |
2014 | self._GetAutoGenObjectList()\r | |
2015 | return self._LibraryAutoGenList\r | |
2016 | \r | |
2017 | ## Test if a module is supported by the platform\r | |
2018 | #\r | |
2019 | # An error will be raised directly if the module or its arch is not supported\r | |
2020 | # by the platform or current configuration\r | |
2021 | #\r | |
2022 | def ValidModule(self, Module):\r | |
2023 | return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances \\r | |
2024 | or Module in self._AsBuildModuleList\r | |
2025 | \r | |
2026 | ## Resolve the library classes in a module to library instances\r | |
2027 | #\r | |
2028 | # This method will not only resolve library classes but also sort the library\r | |
2029 | # instances according to the dependency-ship.\r | |
2030 | #\r | |
2031 | # @param Module The module from which the library classes will be resolved\r | |
2032 | #\r | |
2033 | # @retval library_list List of library instances sorted\r | |
2034 | #\r | |
2035 | def ApplyLibraryInstance(self, Module):\r | |
2036 | # Cover the case that the binary INF file is list in the FDF file but not DSC file, return empty list directly\r | |
2037 | if str(Module) not in self.Platform.Modules:\r | |
2038 | return []\r | |
2039 | \r | |
2040 | return GetModuleLibInstances(Module,\r | |
2041 | self.Platform,\r | |
2042 | self.BuildDatabase,\r | |
2043 | self.Arch,\r | |
2044 | self.BuildTarget,\r | |
2045 | self.ToolChain,\r | |
2046 | self.MetaFile,\r | |
2047 | EdkLogger)\r | |
2048 | \r | |
    ## Override PCD setting (type, value, ...)
    #
    # Mutates ToPcd in place with any settings FromPcd provides.
    #
    # @param ToPcd      The PCD to be overridden (modified in place)
    # @param FromPcd    The PCD the override values come from (may be None)
    # @param Module     Module context, used only in error messages
    # @param Msg        Description of FromPcd's origin, used in error messages
    # @param Library    Library context, used only in error messages
    #
    def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):
        #
        # in case there's PCDs coming from FDF file, which have no type given.
        # at this point, ToPcd.Type has the type found from dependent
        # package
        #
        # For "mixed" PCDs, report under the original token name.
        TokenCName = ToPcd.TokenCName
        for PcdItem in GlobalData.MixedPcd:
            if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                TokenCName = PcdItem[0]
                break
        if FromPcd is not None:
            if ToPcd.Pending and FromPcd.Type:
                # Type not yet decided (e.g. PCD from FDF): take the source type.
                ToPcd.Type = FromPcd.Type
            elif ToPcd.Type and FromPcd.Type\
                and ToPcd.Type != FromPcd.Type and ToPcd.Type in FromPcd.Type:
                # ToPcd.Type is a substring of FromPcd.Type; only promote when
                # the current type is exactly DynamicEx.
                if ToPcd.Type.strip() == TAB_PCDS_DYNAMIC_EX:
                    ToPcd.Type = FromPcd.Type
            elif ToPcd.Type and FromPcd.Type \
                and ToPcd.Type != FromPcd.Type:
                # Genuinely conflicting types: fatal build error.
                if Library:
                    Module = str(Module) + " 's library file (" + str(Library) + ")"
                EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
                                ExtraData="%s.%s is used as [%s] in module %s, but as [%s] in %s."\
                                          % (ToPcd.TokenSpaceGuidCName, TokenCName,
                                             ToPcd.Type, Module, FromPcd.Type, Msg),
                                File=self.MetaFile)

            # Copy over every setting FromPcd actually specifies.
            if FromPcd.MaxDatumSize:
                ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
                ToPcd.MaxSizeUserSet = FromPcd.MaxDatumSize
            if FromPcd.DefaultValue:
                ToPcd.DefaultValue = FromPcd.DefaultValue
            if FromPcd.TokenValue:
                ToPcd.TokenValue = FromPcd.TokenValue
            if FromPcd.DatumType:
                ToPcd.DatumType = FromPcd.DatumType
            if FromPcd.SkuInfoList:
                ToPcd.SkuInfoList = FromPcd.SkuInfoList
            # Add Flexible PCD format parse
            if ToPcd.DefaultValue:
                try:
                    ToPcd.DefaultValue = ValueExpressionEx(ToPcd.DefaultValue, ToPcd.DatumType, self._GuidDict)(True)
                except BadExpression as Value:
                    EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, ToPcd.DefaultValue, Value),
                                    File=self.MetaFile)

            # check the validation of datum
            IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
            if not IsValid:
                EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
                                ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, TokenCName))
            ToPcd.validateranges = FromPcd.validateranges
            ToPcd.validlists = FromPcd.validlists
            ToPcd.expressions = FromPcd.expressions

        # Derive MaxDatumSize from the default value for VOID* PCDs that do not
        # specify one: 'L' prefix => UCS-2 string, '{' => byte array, otherwise
        # an ASCII string (quotes excluded, NUL included by the arithmetic).
        if FromPcd is not None and ToPcd.DatumType == TAB_VOID and not ToPcd.MaxDatumSize:
            EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
                            % (ToPcd.TokenSpaceGuidCName, TokenCName))
            Value = ToPcd.DefaultValue
            if not Value:
                ToPcd.MaxDatumSize = '1'
            elif Value[0] == 'L':
                ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
            elif Value[0] == '{':
                ToPcd.MaxDatumSize = str(len(Value.split(',')))
            else:
                ToPcd.MaxDatumSize = str(len(Value) - 1)

        # apply default SKU for dynamic PCDS if specified one is not available
        if (ToPcd.Type in PCD_DYNAMIC_TYPE_SET or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_SET) \
            and not ToPcd.SkuInfoList:
            if self.Platform.SkuName in self.Platform.SkuIds:
                SkuName = self.Platform.SkuName
            else:
                SkuName = TAB_DEFAULT
            ToPcd.SkuInfoList = {
                SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue)
            }
2133 | \r | |
    ## Apply PCD setting defined platform to a module
    #
    # Override order: module INF values are overridden by the DSC global PCD
    # section, then by the DSC [Components] module-scoped PCD section.
    #
    # @param Module     The module whose PCD settings will be overridden
    # @param Pcds       {(TokenCName, TokenSpaceGuidCName): Pcd} for the module;
    #                   entries are mutated in place
    # @param Library    Library context forwarded for error reporting
    #
    # @retval PCD_list  The PCD objects with platform settings applied
    #
    def ApplyPcdSetting(self, Module, Pcds, Library=""):
        # for each PCD in module
        for Name, Guid in Pcds:
            PcdInModule = Pcds[Name, Guid]
            # find out the PCD setting in platform
            if (Name, Guid) in self.Platform.Pcds:
                PcdInPlatform = self.Platform.Pcds[Name, Guid]
            else:
                PcdInPlatform = None
            # then override the settings if any
            self._OverridePcd(PcdInModule, PcdInPlatform, Module, Msg="DSC PCD sections", Library=Library)
            # resolve the VariableGuid value
            for SkuId in PcdInModule.SkuInfoList:
                Sku = PcdInModule.SkuInfoList[SkuId]
                if Sku.VariableGuid == '': continue
                Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
                if Sku.VariableGuidValue is None:
                    # The GUID name is not declared by any dependent package:
                    # fatal, since the HII variable cannot be addressed.
                    PackageList = "\n\t".join(str(P) for P in self.PackageList)
                    EdkLogger.error(
                                'build',
                                RESOURCE_NOT_AVAILABLE,
                                "Value of GUID [%s] is not found in" % Sku.VariableGuid,
                                ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
                                                        % (Guid, Name, str(Module)),
                                File=self.MetaFile
                                )

        # override PCD settings with module specific setting
        if Module in self.Platform.Modules:
            PlatformModule = self.Platform.Modules[str(Module)]
            for Key in PlatformModule.Pcds:
                Flag = False
                if Key in Pcds:
                    ToPcd = Pcds[Key]
                    Flag = True
                elif Key in GlobalData.MixedPcd:
                    # A "mixed" PCD may appear in the module under an aliased
                    # (TokenCName, Guid) key; search the alias list.
                    for PcdItem in GlobalData.MixedPcd[Key]:
                        if PcdItem in Pcds:
                            ToPcd = Pcds[PcdItem]
                            Flag = True
                            break
                if Flag:
                    self._OverridePcd(ToPcd, PlatformModule.Pcds[Key], Module, Msg="DSC Components Module scoped PCD section", Library=Library)
        # use PCD value to calculate the MaxDatumSize when it is not specified
        for Name, Guid in Pcds:
            Pcd = Pcds[Name, Guid]
            if Pcd.DatumType == TAB_VOID and not Pcd.MaxDatumSize:
                Pcd.MaxSizeUserSet = None
                Value = Pcd.DefaultValue
                # Same size heuristic as in _OverridePcd: L"..." is UCS-2,
                # {...} is a byte list, otherwise an ASCII string.
                if not Value:
                    Pcd.MaxDatumSize = '1'
                elif Value[0] == 'L':
                    Pcd.MaxDatumSize = str((len(Value) - 2) * 2)
                elif Value[0] == '{':
                    Pcd.MaxDatumSize = str(len(Value.split(',')))
                else:
                    Pcd.MaxDatumSize = str(len(Value) - 1)
        return Pcds.values()
2198 | \r | |
    ## Resolve library names to library modules
    #
    # (for Edk.x modules)
    #
    # Walks the library dependency graph breadth-first-ish (stack based),
    # collecting each library instance once.
    #
    # @param Module     The module from which the library names will be resolved
    #
    # @retval library_list    The list of library modules
    #
    def ResolveLibraryReference(self, Module):
        EdkLogger.verbose("")
        EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), self.Arch))
        LibraryConsumerList = [Module]

        # "CompilerStub" is a must for Edk modules
        if Module.Libraries:
            Module.Libraries.append("CompilerStub")
        LibraryList = []
        while len(LibraryConsumerList) > 0:
            M = LibraryConsumerList.pop()
            for LibraryName in M.Libraries:
                # ':dummy:' — Edk.x libraries are looked up by name only; the
                # class part of the key is unused.
                Library = self.Platform.LibraryClasses[LibraryName, ':dummy:']
                if Library is None:
                    # Fall back to a case-insensitive name match.
                    for Key in self.Platform.LibraryClasses.data:
                        if LibraryName.upper() == Key.upper():
                            Library = self.Platform.LibraryClasses[Key, ':dummy:']
                            break
                if Library is None:
                    # Missing library is a warning, not an error, for Edk.x.
                    EdkLogger.warn("build", "Library [%s] is not found" % LibraryName, File=str(M),
                                   ExtraData="\t%s [%s]" % (str(Module), self.Arch))
                    continue

                if Library not in LibraryList:
                    LibraryList.append(Library)
                    # Newly found libraries may pull in further libraries.
                    LibraryConsumerList.append(Library)
                    EdkLogger.verbose("\t" + LibraryName + " : " + str(Library) + ' ' + str(type(Library)))
        return LibraryList
2235 | \r | |
2236 | ## Calculate the priority value of the build option\r | |
2237 | #\r | |
2238 | # @param Key Build option definition contain: TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE\r | |
2239 | #\r | |
2240 | # @retval Value Priority value based on the priority list.\r | |
2241 | #\r | |
2242 | def CalculatePriorityValue(self, Key):\r | |
2243 | Target, ToolChain, Arch, CommandType, Attr = Key.split('_')\r | |
2244 | PriorityValue = 0x11111\r | |
2245 | if Target == "*":\r | |
2246 | PriorityValue &= 0x01111\r | |
2247 | if ToolChain == "*":\r | |
2248 | PriorityValue &= 0x10111\r | |
2249 | if Arch == "*":\r | |
2250 | PriorityValue &= 0x11011\r | |
2251 | if CommandType == "*":\r | |
2252 | PriorityValue &= 0x11101\r | |
2253 | if Attr == "*":\r | |
2254 | PriorityValue &= 0x11110\r | |
2255 | \r | |
2256 | return self.PrioList["0x%0.5x" % PriorityValue]\r | |
2257 | \r | |
2258 | \r | |
2259 | ## Expand * in build option key\r | |
2260 | #\r | |
2261 | # @param Options Options to be expanded\r | |
2262 | #\r | |
2263 | # @retval options Options expanded\r | |
2264 | #\r | |
2265 | def _ExpandBuildOption(self, Options, ModuleStyle=None):\r | |
2266 | BuildOptions = {}\r | |
2267 | FamilyMatch = False\r | |
2268 | FamilyIsNull = True\r | |
2269 | \r | |
2270 | OverrideList = {}\r | |
2271 | #\r | |
2272 | # Construct a list contain the build options which need override.\r | |
2273 | #\r | |
2274 | for Key in Options:\r | |
2275 | #\r | |
2276 | # Key[0] -- tool family\r | |
2277 | # Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE\r | |
2278 | #\r | |
2279 | if (Key[0] == self.BuildRuleFamily and\r | |
2280 | (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):\r | |
2281 | Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')\r | |
2282 | if (Target == self.BuildTarget or Target == "*") and\\r | |
2283 | (ToolChain == self.ToolChain or ToolChain == "*") and\\r | |
2284 | (Arch == self.Arch or Arch == "*") and\\r | |
2285 | Options[Key].startswith("="):\r | |
2286 | \r | |
2287 | if OverrideList.get(Key[1]) is not None:\r | |
2288 | OverrideList.pop(Key[1])\r | |
2289 | OverrideList[Key[1]] = Options[Key]\r | |
2290 | \r | |
2291 | #\r | |
2292 | # Use the highest priority value.\r | |
2293 | #\r | |
2294 | if (len(OverrideList) >= 2):\r | |
2295 | KeyList = OverrideList.keys()\r | |
2296 | for Index in range(len(KeyList)):\r | |
2297 | NowKey = KeyList[Index]\r | |
2298 | Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")\r | |
2299 | for Index1 in range(len(KeyList) - Index - 1):\r | |
2300 | NextKey = KeyList[Index1 + Index + 1]\r | |
2301 | #\r | |
2302 | # Compare two Key, if one is included by another, choose the higher priority one\r | |
2303 | #\r | |
2304 | Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")\r | |
2305 | if (Target1 == Target2 or Target1 == "*" or Target2 == "*") and\\r | |
2306 | (ToolChain1 == ToolChain2 or ToolChain1 == "*" or ToolChain2 == "*") and\\r | |
2307 | (Arch1 == Arch2 or Arch1 == "*" or Arch2 == "*") and\\r | |
2308 | (CommandType1 == CommandType2 or CommandType1 == "*" or CommandType2 == "*") and\\r | |
2309 | (Attr1 == Attr2 or Attr1 == "*" or Attr2 == "*"):\r | |
2310 | \r | |
2311 | if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):\r | |
2312 | if Options.get((self.BuildRuleFamily, NextKey)) is not None:\r | |
2313 | Options.pop((self.BuildRuleFamily, NextKey))\r | |
2314 | else:\r | |
2315 | if Options.get((self.BuildRuleFamily, NowKey)) is not None:\r | |
2316 | Options.pop((self.BuildRuleFamily, NowKey))\r | |
2317 | \r | |
2318 | for Key in Options:\r | |
2319 | if ModuleStyle is not None and len (Key) > 2:\r | |
2320 | # Check Module style is EDK or EDKII.\r | |
2321 | # Only append build option for the matched style module.\r | |
2322 | if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:\r | |
2323 | continue\r | |
2324 | elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:\r | |
2325 | continue\r | |
2326 | Family = Key[0]\r | |
2327 | Target, Tag, Arch, Tool, Attr = Key[1].split("_")\r | |
2328 | # if tool chain family doesn't match, skip it\r | |
2329 | if Tool in self.ToolDefinition and Family != "":\r | |
2330 | FamilyIsNull = False\r | |
2331 | if self.ToolDefinition[Tool].get(TAB_TOD_DEFINES_BUILDRULEFAMILY, "") != "":\r | |
2332 | if Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:\r | |
2333 | continue\r | |
2334 | elif Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_FAMILY]:\r | |
2335 | continue\r | |
2336 | FamilyMatch = True\r | |
2337 | # expand any wildcard\r | |
2338 | if Target == "*" or Target == self.BuildTarget:\r | |
2339 | if Tag == "*" or Tag == self.ToolChain:\r | |
2340 | if Arch == "*" or Arch == self.Arch:\r | |
2341 | if Tool not in BuildOptions:\r | |
2342 | BuildOptions[Tool] = {}\r | |
2343 | if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):\r | |
2344 | BuildOptions[Tool][Attr] = Options[Key]\r | |
2345 | else:\r | |
2346 | # append options for the same tool except PATH\r | |
2347 | if Attr != 'PATH':\r | |
2348 | BuildOptions[Tool][Attr] += " " + Options[Key]\r | |
2349 | else:\r | |
2350 | BuildOptions[Tool][Attr] = Options[Key]\r | |
2351 | # Build Option Family has been checked, which need't to be checked again for family.\r | |
2352 | if FamilyMatch or FamilyIsNull:\r | |
2353 | return BuildOptions\r | |
2354 | \r | |
2355 | for Key in Options:\r | |
2356 | if ModuleStyle is not None and len (Key) > 2:\r | |
2357 | # Check Module style is EDK or EDKII.\r | |
2358 | # Only append build option for the matched style module.\r | |
2359 | if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:\r | |
2360 | continue\r | |
2361 | elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:\r | |
2362 | continue\r | |
2363 | Family = Key[0]\r | |
2364 | Target, Tag, Arch, Tool, Attr = Key[1].split("_")\r | |
2365 | # if tool chain family doesn't match, skip it\r | |
2366 | if Tool not in self.ToolDefinition or Family == "":\r | |
2367 | continue\r | |
2368 | # option has been added before\r | |
2369 | if Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_FAMILY]:\r | |
2370 | continue\r | |
2371 | \r | |
2372 | # expand any wildcard\r | |
2373 | if Target == "*" or Target == self.BuildTarget:\r | |
2374 | if Tag == "*" or Tag == self.ToolChain:\r | |
2375 | if Arch == "*" or Arch == self.Arch:\r | |
2376 | if Tool not in BuildOptions:\r | |
2377 | BuildOptions[Tool] = {}\r | |
2378 | if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):\r | |
2379 | BuildOptions[Tool][Attr] = Options[Key]\r | |
2380 | else:\r | |
2381 | # append options for the same tool except PATH\r | |
2382 | if Attr != 'PATH':\r | |
2383 | BuildOptions[Tool][Attr] += " " + Options[Key]\r | |
2384 | else:\r | |
2385 | BuildOptions[Tool][Attr] = Options[Key]\r | |
2386 | return BuildOptions\r | |
2387 | \r | |
2388 | ## Append build options in platform to a module\r | |
2389 | #\r | |
2390 | # @param Module The module to which the build options will be appened\r | |
2391 | #\r | |
2392 | # @retval options The options appended with build options in platform\r | |
2393 | #\r | |
2394 | def ApplyBuildOption(self, Module):\r | |
2395 | # Get the different options for the different style module\r | |
2396 | if Module.AutoGenVersion < 0x00010005:\r | |
2397 | PlatformOptions = self.EdkBuildOption\r | |
2398 | ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDK_NAME, Module.ModuleType)\r | |
2399 | else:\r | |
2400 | PlatformOptions = self.EdkIIBuildOption\r | |
2401 | ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)\r | |
2402 | ModuleTypeOptions = self._ExpandBuildOption(ModuleTypeOptions)\r | |
2403 | ModuleOptions = self._ExpandBuildOption(Module.BuildOptions)\r | |
2404 | if Module in self.Platform.Modules:\r | |
2405 | PlatformModule = self.Platform.Modules[str(Module)]\r | |
2406 | PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions)\r | |
2407 | else:\r | |
2408 | PlatformModuleOptions = {}\r | |
2409 | \r | |
2410 | BuildRuleOrder = None\r | |
2411 | for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:\r | |
2412 | for Tool in Options:\r | |
2413 | for Attr in Options[Tool]:\r | |
2414 | if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:\r | |
2415 | BuildRuleOrder = Options[Tool][Attr]\r | |
2416 | \r | |
2417 | AllTools = set(ModuleOptions.keys() + PlatformOptions.keys() +\r | |
2418 | PlatformModuleOptions.keys() + ModuleTypeOptions.keys() +\r | |
2419 | self.ToolDefinition.keys())\r | |
2420 | BuildOptions = defaultdict(lambda: defaultdict(str))\r | |
2421 | for Tool in AllTools:\r | |
2422 | for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:\r | |
2423 | if Tool not in Options:\r | |
2424 | continue\r | |
2425 | for Attr in Options[Tool]:\r | |
2426 | #\r | |
2427 | # Do not generate it in Makefile\r | |
2428 | #\r | |
2429 | if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:\r | |
2430 | continue\r | |
2431 | Value = Options[Tool][Attr]\r | |
2432 | # check if override is indicated\r | |
2433 | if Value.startswith('='):\r | |
2434 | BuildOptions[Tool][Attr] = mws.handleWsMacro(Value[1:])\r | |
2435 | else:\r | |
2436 | if Attr != 'PATH':\r | |
2437 | BuildOptions[Tool][Attr] += " " + mws.handleWsMacro(Value)\r | |
2438 | else:\r | |
2439 | BuildOptions[Tool][Attr] = mws.handleWsMacro(Value)\r | |
2440 | \r | |
2441 | if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag is not None:\r | |
2442 | #\r | |
2443 | # Override UNI flag only for EDK module.\r | |
2444 | #\r | |
2445 | BuildOptions['BUILD']['FLAGS'] = self.Workspace.UniFlag\r | |
2446 | return BuildOptions, BuildRuleOrder\r | |
2447 | \r | |
    # Legacy read-only property table: each public attribute delegates to the
    # corresponding lazy _Get* accessor defined above (pre-decorator style).
    Platform = property(_GetPlatform)
    Name = property(_GetName)
    Guid = property(_GetGuid)
    Version = property(_GetVersion)

    OutputDir = property(_GetOutputDir)
    BuildDir = property(_GetBuildDir)
    MakeFileDir = property(_GetMakeFileDir)
    FdfFile = property(_GetFdfFile)

    PcdTokenNumber = property(_GetPcdTokenNumbers)    # (TokenCName, TokenSpaceGuidCName) : GeneratedTokenNumber
    DynamicPcdList = property(_GetDynamicPcdList)    # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
    NonDynamicPcdList = property(_GetNonDynamicPcdList)    # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
    NonDynamicPcdDict = property(_GetNonDynamicPcdDict)
    PackageList = property(_GetPackageList)

    ToolDefinition = property(_GetToolDefinition)    # toolcode : tool path
    ToolDefinitionFile = property(_GetToolDefFile)    # toolcode : lib path
    ToolChainFamily = property(_GetToolChainFamily)
    BuildRuleFamily = property(_GetBuildRuleFamily)
    BuildOption = property(_GetBuildOptions)    # toolcode : option
    EdkBuildOption = property(_GetEdkBuildOptions)    # edktoolcode : option
    EdkIIBuildOption = property(_GetEdkIIBuildOptions)    # edkiitoolcode : option

    BuildCommand = property(_GetBuildCommand)
    BuildRule = property(_GetBuildRule)
    ModuleAutoGenList = property(_GetModuleAutoGenList)
    LibraryAutoGenList = property(_GetLibraryAutoGenList)
    GenFdsCommand = property(_GenFdsCommand)
2477 | \r | |
#
# extend lists contained in a dictionary with lists stored in another dictionary
# if CopyToDict is not derived from DefaultDict(list) then this may raise exception
#
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    for Key, ValueList in CopyFromDict.items():
        CopyToDict[Key].extend(ValueList)
2485 | \r | |
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    FullPath = path.join(*PathList)
    CreateDirectory(FullPath)
    return FullPath
2491 | \r | |
2492 | ## ModuleAutoGen class\r | |
2493 | #\r | |
2494 | # This class encapsules the AutoGen behaviors for the build tools. In addition to\r | |
2495 | # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according\r | |
2496 | # to the [depex] section in module's inf file.\r | |
2497 | #\r | |
2498 | class ModuleAutoGen(AutoGen):\r | |
    # call super().__init__ then call the worker function with different parameter count
    def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # The _Init flag guards against re-running initialization — presumably
        # the AutoGen base caches instances per key, so __init__ can be invoked
        # again on an already-initialized object (TODO confirm against base).
        if not hasattr(self, "_Init"):
            super(ModuleAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
            # kwargs intentionally not forwarded to the worker.
            self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
            self._Init = True
2505 | \r | |
    ## Cache the timestamps of metafiles of every module in a class attribute
    #
    # Class-level, so it is shared across all ModuleAutoGen instances.
    TimeDict = {}
2509 | \r | |
    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # check if this module is employed by active platform
        # (args[0] is the platform meta-file; see _InitWorker's signature).
        # Returning None means no instance is created and __init__ is never
        # run, so callers must expect a possible None result.
        if not PlatformAutoGen(Workspace, args[0], Target, Toolchain, Arch).ValidModule(MetaFile):
            EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
                              % (MetaFile, Arch))
            return None
        return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
2517 | \r | |
    ## Initialize ModuleAutoGen
    #
    # @param Workspace          EdkIIWorkspaceBuild object
    # @param ModuleFile         The path of module file
    # @param Target             Build target (DEBUG, RELEASE)
    # @param Toolchain          Name of tool chain
    # @param Arch               The arch the module supports
    # @param PlatformFile       Platform meta-file
    #
    def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        # Global marker used by error reporting to identify what was being built.
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)

        self.Workspace = Workspace
        self.WorkspaceDir = Workspace.WorkspaceDir
        self._GuidDict = Workspace._GuidDict
        self.MetaFile = ModuleFile
        self.PlatformInfo = PlatformAutoGen(Workspace, PlatformFile, Target, Toolchain, Arch)

        # Source directory, made workspace-relative.
        self.SourceDir = self.MetaFile.SubDir
        self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)

        self.SourceOverrideDir = None
        # use overrided path defined in DSC file
        if self.MetaFile.Key in GlobalData.gOverrideDir:
            self.SourceOverrideDir = GlobalData.gOverrideDir[self.MetaFile.Key]

        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily

        # Generation-state flags, flipped by the corresponding Create* steps.
        self.IsCodeFileCreated = False
        self.IsAsBuiltInfCreated = False
        self.DepexGenerated = False

        self.BuildDatabase = self.Workspace.BuildDatabase
        self.BuildRuleOrder = None
        self.BuildTime = 0

        # Usage comments collected from INF sections, keyed per item.
        self._PcdComments = OrderedListDict()
        self._GuidComments = OrderedListDict()
        self._ProtocolComments = OrderedListDict()
        self._PpiComments = OrderedListDict()
        # Lazily-computed build-target caches (None = not yet computed).
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None

        self.AutoGenDepSet = set()
        # Modules that link against this one (filled in by PlatformAutoGen).
        self.ReferenceModules = []
        self.ConstPcd = {}
2572 | \r | |
    def __repr__(self):
        # Identify the module by its meta-file and target architecture.
        return "%s [%s]" % (self.MetaFile, self.Arch)
2575 | \r | |
2576 | # Get FixedAtBuild Pcds of this Module\r | |
2577 | @cached_property\r | |
2578 | def FixedAtBuildPcds(self):\r | |
2579 | RetVal = []\r | |
2580 | for Pcd in self.ModulePcdList:\r | |
2581 | if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:\r | |
2582 | continue\r | |
2583 | if Pcd not in RetVal:\r | |
2584 | RetVal.append(Pcd)\r | |
2585 | return RetVal\r | |
2586 | \r | |
2587 | @cached_property\r | |
2588 | def FixedVoidTypePcds(self):\r | |
2589 | RetVal = {}\r | |
2590 | for Pcd in self.FixedAtBuildPcds:\r | |
2591 | if Pcd.DatumType == TAB_VOID:\r | |
2592 | if '{}.{}'.format(Pcd.TokenSpaceGuidCName, Pcd.TokenCName) not in RetVal:\r | |
2593 | RetVal['{}.{}'.format(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)] = Pcd.DefaultValue\r | |
2594 | return RetVal\r | |
2595 | \r | |
    @property
    def UniqueBaseName(self):
        # Disambiguate modules that share a BaseName by appending this module's
        # GUID.  Two modules with the same BaseName AND the same FILE_GUID are
        # a fatal error, since their build outputs would collide.
        BaseName = self.Name
        for Module in self.PlatformInfo.ModuleAutoGenList:
            if Module.MetaFile == self.MetaFile:
                continue
            if Module.Name == self.Name:
                if uuid.UUID(Module.Guid) == uuid.UUID(self.Guid):
                    EdkLogger.error("build", FILE_DUPLICATED, 'Modules have same BaseName and FILE_GUID:\n'
                                    ' %s\n %s' % (Module.MetaFile, self.MetaFile))
                BaseName = '%s_%s' % (self.Name, self.Guid)
        return BaseName
2608 | \r | |
    # Macros could be used in build_rule.txt (also Makefile)
    # Several names are deliberate aliases for the same value (e.g. TOOLCHAIN /
    # TOOLCHAIN_TAG / TOOL_CHAIN_TAG) to support different consumers.
    @cached_property
    def Macros(self):
        return OrderedDict((
            ("WORKSPACE" ,self.WorkspaceDir),
            ("MODULE_NAME" ,self.Name),
            ("MODULE_NAME_GUID" ,self.UniqueBaseName),
            ("MODULE_GUID" ,self.Guid),
            ("MODULE_VERSION" ,self.Version),
            ("MODULE_TYPE" ,self.ModuleType),
            ("MODULE_FILE" ,str(self.MetaFile)),
            ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),
            ("MODULE_RELATIVE_DIR" ,self.SourceDir),
            ("MODULE_DIR" ,self.SourceDir),
            ("BASE_NAME" ,self.Name),
            ("ARCH" ,self.Arch),
            ("TOOLCHAIN" ,self.ToolChain),
            ("TOOLCHAIN_TAG" ,self.ToolChain),
            ("TOOL_CHAIN_TAG" ,self.ToolChain),
            ("TARGET" ,self.BuildTarget),
            ("BUILD_DIR" ,self.PlatformInfo.BuildDir),
            ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("MODULE_BUILD_DIR" ,self.BuildDir),
            ("OUTPUT_DIR" ,self.OutputDir),
            ("DEBUG_DIR" ,self.DebugDir),
            ("DEST_DIR_OUTPUT" ,self.OutputDir),
            ("DEST_DIR_DEBUG" ,self.DebugDir),
            ("PLATFORM_NAME" ,self.PlatformInfo.Name),
            ("PLATFORM_GUID" ,self.PlatformInfo.Guid),
            ("PLATFORM_VERSION" ,self.PlatformInfo.Version),
            ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),
            ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
            ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),
            ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)
            ))
2645 | \r | |
2646 | ## Return the module build data object\r | |
2647 | @cached_property\r | |
2648 | def Module(self):\r | |
2649 | return self.Workspace.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r | |
2650 | \r | |
2651 | ## Return the module name\r | |
2652 | @cached_property\r | |
2653 | def Name(self):\r | |
2654 | return self.Module.BaseName\r | |
2655 | \r | |
2656 | ## Return the module DxsFile if exist\r | |
2657 | @cached_property\r | |
2658 | def DxsFile(self):\r | |
2659 | return self.Module.DxsFile\r | |
2660 | \r | |
2661 | ## Return the module meta-file GUID\r | |
2662 | @cached_property\r | |
2663 | def Guid(self):\r | |
2664 | #\r | |
2665 | # To build same module more than once, the module path with FILE_GUID overridden has\r | |
2666 | # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the realy path\r | |
2667 | # in DSC. The overridden GUID can be retrieved from file name\r | |
2668 | #\r | |
2669 | if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):\r | |
2670 | #\r | |
2671 | # Length of GUID is 36\r | |
2672 | #\r | |
2673 | return os.path.basename(self.MetaFile.Path)[:36]\r | |
2674 | return self.Module.Guid\r | |
2675 | \r | |
2676 | ## Return the module version\r | |
2677 | @cached_property\r | |
2678 | def Version(self):\r | |
2679 | return self.Module.Version\r | |
2680 | \r | |
2681 | ## Return the module type\r | |
2682 | @cached_property\r | |
2683 | def ModuleType(self):\r | |
2684 | return self.Module.ModuleType\r | |
2685 | \r | |
2686 | ## Return the component type (for Edk.x style of module)\r | |
2687 | @cached_property\r | |
2688 | def ComponentType(self):\r | |
2689 | return self.Module.ComponentType\r | |
2690 | \r | |
2691 | ## Return the build type\r | |
2692 | @cached_property\r | |
2693 | def BuildType(self):\r | |
2694 | return self.Module.BuildType\r | |
2695 | \r | |
2696 | ## Return the PCD_IS_DRIVER setting\r | |
2697 | @cached_property\r | |
2698 | def PcdIsDriver(self):\r | |
2699 | return self.Module.PcdIsDriver\r | |
2700 | \r | |
2701 | ## Return the autogen version, i.e. module meta-file version\r | |
2702 | @cached_property\r | |
2703 | def AutoGenVersion(self):\r | |
2704 | return self.Module.AutoGenVersion\r | |
2705 | \r | |
2706 | ## Check if the module is library or not\r | |
2707 | @cached_property\r | |
2708 | def IsLibrary(self):\r | |
2709 | return bool(self.Module.LibraryClass)\r | |
2710 | \r | |
2711 | ## Check if the module is binary module or not\r | |
2712 | @cached_property\r | |
2713 | def IsBinaryModule(self):\r | |
2714 | return self.Module.IsBinaryModule\r | |
2715 | \r | |
2716 | ## Return the directory to store intermediate files of the module\r | |
2717 | @cached_property\r | |
2718 | def BuildDir(self):\r | |
2719 | return _MakeDir((\r | |
2720 | self.PlatformInfo.BuildDir,\r | |
2721 | self.Arch,\r | |
2722 | self.SourceDir,\r | |
2723 | self.MetaFile.BaseName\r | |
2724 | ))\r | |
2725 | \r | |
2726 | ## Return the directory to store the intermediate object files of the mdoule\r | |
2727 | @cached_property\r | |
2728 | def OutputDir(self):\r | |
2729 | return _MakeDir((self.BuildDir, "OUTPUT"))\r | |
2730 | \r | |
2731 | ## Return the directory path to store ffs file\r | |
2732 | @cached_property\r | |
2733 | def FfsOutputDir(self):\r | |
2734 | if GlobalData.gFdfParser:\r | |
2735 | return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r | |
2736 | return ''\r | |
2737 | \r | |
2738 | ## Return the directory to store auto-gened source files of the mdoule\r | |
2739 | @cached_property\r | |
2740 | def DebugDir(self):\r | |
2741 | return _MakeDir((self.BuildDir, "DEBUG"))\r | |
2742 | \r | |
2743 | ## Return the path of custom file\r | |
2744 | @cached_property\r | |
2745 | def CustomMakefile(self):\r | |
2746 | RetVal = {}\r | |
2747 | for Type in self.Module.CustomMakefile:\r | |
2748 | MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'\r | |
2749 | if self.SourceOverrideDir is not None:\r | |
2750 | File = os.path.join(self.SourceOverrideDir, self.Module.CustomMakefile[Type])\r | |
2751 | if not os.path.exists(File):\r | |
2752 | File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r | |
2753 | else:\r | |
2754 | File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r | |
2755 | RetVal[MakeType] = File\r | |
2756 | return RetVal\r | |
2757 | \r | |
2758 | ## Return the directory of the makefile\r | |
2759 | #\r | |
2760 | # @retval string The directory string of module's makefile\r | |
2761 | #\r | |
2762 | @cached_property\r | |
2763 | def MakeFileDir(self):\r | |
2764 | return self.BuildDir\r | |
2765 | \r | |
2766 | ## Return build command string\r | |
2767 | #\r | |
2768 | # @retval string Build command string\r | |
2769 | #\r | |
2770 | @cached_property\r | |
2771 | def BuildCommand(self):\r | |
2772 | return self.PlatformInfo.BuildCommand\r | |
2773 | \r | |
2774 | ## Get object list of all packages the module and its dependent libraries belong to\r | |
2775 | #\r | |
2776 | # @retval list The list of package object\r | |
2777 | #\r | |
2778 | @cached_property\r | |
2779 | def DerivedPackageList(self):\r | |
2780 | PackageList = []\r | |
2781 | for M in [self.Module] + self.DependentLibraryList:\r | |
2782 | for Package in M.Packages:\r | |
2783 | if Package in PackageList:\r | |
2784 | continue\r | |
2785 | PackageList.append(Package)\r | |
2786 | return PackageList\r | |
2787 | \r | |
    ## Get the depex string
    #
    # @return : a string contain all depex expresion.
    def _GetDepexExpresionString(self):
        """Build the [Depex] section text for the As-Built INF.

        Collects depex expressions from this module and every dependent
        library by re-parsing each INF. USER_DEFINED modules keep their
        per-(arch, module-type) sections verbatim; all other module types
        get a single combined expression joined with AND.
        """
        DepexStr = ''
        DepexList = []
        ## DPX_SOURCE IN Define section.
        # A DXS file supersedes INF [Depex] sections entirely.
        if self.Module.DxsFile:
            return DepexStr
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            DepexExpresionList = InfObj.GetDepexExpresionList()
            for DepexExpresion in DepexExpresionList:
                for key in DepexExpresion:
                    Arch, ModuleType = key
                    # Drop comment tokens from the expression list.
                    DepexExpr = [x for x in DepexExpresion[key] if not str(x).startswith('#')]
                    # the type of build module is USER_DEFINED.
                    # All different DEPEX section tags would be copied into the As Built INF file
                    # and there would be separate DEPEX section tags
                    if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED:
                        if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
                    else:
                        # Non-USER_DEFINED: accept common-arch sections, or
                        # sections matching this arch with a common/matching type.
                        if Arch.upper() == TAB_ARCH_COMMON or \
                           (Arch.upper() == self.Arch.upper() and \
                           ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
                            DepexList.append({(Arch, ModuleType): DepexExpr})

        #the type of build module is USER_DEFINED.
        if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED:
            for Depex in DepexList:
                for key in Depex:
                    DepexStr += '[Depex.%s.%s]\n' % key
                    DepexStr += '\n'.join('# '+ val for val in Depex[key])
                    DepexStr += '\n\n'
            if not DepexStr:
                return '[Depex.%s]\n' % self.Arch
            return DepexStr

        #the type of build module not is USER_DEFINED.
        Count = 0
        for Depex in DepexList:
            Count += 1
            if DepexStr != '':
                DepexStr += ' AND '
            DepexStr += '('
            for D in Depex.values():
                DepexStr += ' '.join(val for val in D)
            # Strip a trailing 'END' token before closing the parenthesis.
            Index = DepexStr.find('END')
            if Index > -1 and Index == len(DepexStr) - 3:
                DepexStr = DepexStr[:-3]
            DepexStr = DepexStr.strip()
            DepexStr += ')'
        if Count == 1:
            # A single sub-expression needs no wrapping parentheses.
            DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
        if not DepexStr:
            return '[Depex.%s]\n' % self.Arch
        return '[Depex.%s]\n# ' % self.Arch + DepexStr
2847 | \r | |
    ## Merge dependency expression
    #
    # @retval list The token list of the dependency expression after parsed
    #
    @cached_property
    def DepexList(self):
        """Merged depex token list for this module, keyed by module type.

        Returns {} when the depex comes from a DXS file, the module is a
        library, or a prebuilt .depex binary is among the file types.
        FixedAtBuild VOID* PCDs referenced in the depex are replaced by
        their 16-byte values.
        """
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexList = []
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.Depex[self.Arch, self.ModuleType]:
                # Join successive expressions with AND, each parenthesized.
                if DepexList != []:
                    DepexList.append('AND')
                DepexList.append('(')
                #replace D with value if D is FixedAtBuild PCD
                NewList = []
                for item in D:
                    if '.' not in item:
                        NewList.append(item)
                    else:
                        # Tokens containing '.' must name FixedAtBuild VOID* PCDs.
                        if item not in self._FixedPcdVoidTypeDict:
                            EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))
                        else:
                            Value = self._FixedPcdVoidTypeDict[item]
                            if len(Value.split(',')) != 16:
                                EdkLogger.error("build", FORMAT_INVALID,
                                                "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
                            NewList.append(Value)
                DepexList.extend(NewList)
                if DepexList[-1] == 'END':  # no need of a END at this time
                    DepexList.pop()
                DepexList.append(')')
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexList))
            # BEFORE/AFTER expressions stop further merging.
            if 'BEFORE' in DepexList or 'AFTER' in DepexList:
                break
        if len(DepexList) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexList}
2893 | \r | |
    ## Merge dependency expression
    #
    # @retval list The token list of the dependency expression after parsed
    #
    @cached_property
    def DepexExpressionDict(self):
        """Merged depex expression string for this module, keyed by module type.

        Same skip conditions as DepexList, but produces a single string
        (sub-expressions parenthesized and joined with ' AND ').
        """
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexExpressionString = ''
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.DepexExpression[self.Arch, self.ModuleType]:
                if DepexExpressionString != '':
                    DepexExpressionString += ' AND '
                DepexExpressionString += '('
                DepexExpressionString += D
                # Drop a trailing END token before closing the parenthesis.
                DepexExpressionString = DepexExpressionString.rstrip('END').strip()
                DepexExpressionString += ')'
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
            # BEFORE/AFTER expressions stop further merging.
            if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
                break
        if len(DepexExpressionString) > 0:
            EdkLogger.verbose('')

        return {self.ModuleType:DepexExpressionString}
2925 | \r | |
    # Get the tiano core user extension, it is contain dependent library.
    # @retval: a list contain tiano core userextension.
    #
    def _GetTianoCoreUserExtensionList(self):
        """Collect TianoCore [UserExtensions] sections from this module and
        its dependent libraries, filtered to common-arch or this arch.

        Each result entry is a list: the section header line, its content
        lines, and a trailing newline.
        """
        TianoCoreUserExtentionList = []
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
            for TianoCoreUserExtent in TianoCoreUserExtenList:
                for Section in TianoCoreUserExtent:
                    ItemList = Section.split(TAB_SPLIT)
                    Arch = self.Arch
                    # A 4-part section name carries an explicit arch.
                    if len(ItemList) == 4:
                        Arch = ItemList[3]
                    if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
                        TianoCoreList = []
                        TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
                        TianoCoreList.extend(TianoCoreUserExtent[Section][:])
                        TianoCoreList.append('\n')
                        TianoCoreUserExtentionList.append(TianoCoreList)

        return TianoCoreUserExtentionList
2949 | \r | |
2950 | ## Return the list of specification version required for the module\r | |
2951 | #\r | |
2952 | # @retval list The list of specification defined in module file\r | |
2953 | #\r | |
2954 | @cached_property\r | |
2955 | def Specification(self):\r | |
2956 | return self.Module.Specification\r | |
2957 | \r | |
2958 | ## Tool option for the module build\r | |
2959 | #\r | |
2960 | # @param PlatformInfo The object of PlatformBuildInfo\r | |
2961 | # @retval dict The dict containing valid options\r | |
2962 | #\r | |
2963 | @cached_property\r | |
2964 | def BuildOption(self):\r | |
2965 | RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)\r | |
2966 | if self.BuildRuleOrder:\r | |
2967 | self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]\r | |
2968 | return RetVal\r | |
2969 | \r | |
2970 | ## Get include path list from tool option for the module build\r | |
2971 | #\r | |
2972 | # @retval list The include path list\r | |
2973 | #\r | |
2974 | @cached_property\r | |
2975 | def BuildOptionIncPathList(self):\r | |
2976 | #\r | |
2977 | # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT\r | |
2978 | # is the former use /I , the Latter used -I to specify include directories\r | |
2979 | #\r | |
2980 | if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):\r | |
2981 | BuildOptIncludeRegEx = gBuildOptIncludePatternMsft\r | |
2982 | elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):\r | |
2983 | BuildOptIncludeRegEx = gBuildOptIncludePatternOther\r | |
2984 | else:\r | |
2985 | #\r | |
2986 | # New ToolChainFamily, don't known whether there is option to specify include directories\r | |
2987 | #\r | |
2988 | return []\r | |
2989 | \r | |
2990 | RetVal = []\r | |
2991 | for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):\r | |
2992 | try:\r | |
2993 | FlagOption = self.BuildOption[Tool]['FLAGS']\r | |
2994 | except KeyError:\r | |
2995 | FlagOption = ''\r | |
2996 | \r | |
2997 | if self.PlatformInfo.ToolChainFamily != 'RVCT':\r | |
2998 | IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]\r | |
2999 | else:\r | |
3000 | #\r | |
3001 | # RVCT may specify a list of directory seperated by commas\r | |
3002 | #\r | |
3003 | IncPathList = []\r | |
3004 | for Path in BuildOptIncludeRegEx.findall(FlagOption):\r | |
3005 | PathList = GetSplitList(Path, TAB_COMMA_SPLIT)\r | |
3006 | IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)\r | |
3007 | \r | |
3008 | #\r | |
3009 | # EDK II modules must not reference header files outside of the packages they depend on or\r | |
3010 | # within the module's directory tree. Report error if violation.\r | |
3011 | #\r | |
3012 | if self.AutoGenVersion >= 0x00010005:\r | |
3013 | for Path in IncPathList:\r | |
3014 | if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):\r | |
3015 | ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)\r | |
3016 | EdkLogger.error("build",\r | |
3017 | PARAMETER_INVALID,\r | |
3018 | ExtraData=ErrMsg,\r | |
3019 | File=str(self.MetaFile))\r | |
3020 | RetVal += IncPathList\r | |
3021 | return RetVal\r | |
3022 | \r | |
    ## Return a list of files which can be built from source
    #
    # What kind of files can be built is determined by build rules in
    # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
    #
    @cached_property
    def SourceFileList(self):
        """Source files matching the active toolchain tag and family.

        Side effects: prepends each file's directory to IncludePathList
        (EDK II modules only), enforces BuildRuleOrder, and applies build
        rules to every selected file.
        """
        RetVal = []
        ToolChainTagSet = {"", "*", self.ToolChain}
        ToolChainFamilySet = {"", "*", self.ToolChainFamily, self.BuildRuleFamily}
        for F in self.Module.Sources:
            # match tool chain
            if F.TagName not in ToolChainTagSet:
                EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
                continue
            # match tool chain family or build rule family
            if F.ToolChainFamily not in ToolChainFamilySet:
                EdkLogger.debug(
                        EdkLogger.DEBUG_0,
                        "The file [%s] must be built by tools of [%s], " \
                        "but current toolchain family is [%s], buildrule family is [%s]" \
                            % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
                continue

            # add the file path into search path list for file including
            if F.Dir not in self.IncludePathList and self.AutoGenVersion >= 0x00010005:
                self.IncludePathList.insert(0, F.Dir)
            RetVal.append(F)

        # Drop duplicate stems that lose under BuildRuleOrder (e.g. .s vs .S).
        self._MatchBuildRuleOrder(RetVal)

        for F in RetVal:
            self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return RetVal
3058 | \r | |
3059 | def _MatchBuildRuleOrder(self, FileList):\r | |
3060 | Order_Dict = {}\r | |
3061 | self.BuildOption\r | |
3062 | for SingleFile in FileList:\r | |
3063 | if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:\r | |
3064 | key = SingleFile.Path.split(SingleFile.Ext)[0]\r | |
3065 | if key in Order_Dict:\r | |
3066 | Order_Dict[key].append(SingleFile.Ext)\r | |
3067 | else:\r | |
3068 | Order_Dict[key] = [SingleFile.Ext]\r | |
3069 | \r | |
3070 | RemoveList = []\r | |
3071 | for F in Order_Dict:\r | |
3072 | if len(Order_Dict[F]) > 1:\r | |
3073 | Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))\r | |
3074 | for Ext in Order_Dict[F][1:]:\r | |
3075 | RemoveList.append(F + Ext)\r | |
3076 | \r | |
3077 | for item in RemoveList:\r | |
3078 | FileList.remove(item)\r | |
3079 | \r | |
3080 | return FileList\r | |
3081 | \r | |
3082 | ## Return the list of unicode files\r | |
3083 | @cached_property\r | |
3084 | def UnicodeFileList(self):\r | |
3085 | return self.FileTypes.get(TAB_UNICODE_FILE,[])\r | |
3086 | \r | |
3087 | ## Return the list of vfr files\r | |
3088 | @cached_property\r | |
3089 | def VfrFileList(self):\r | |
3090 | return self.FileTypes.get(TAB_VFR_FILE, [])\r | |
3091 | \r | |
3092 | ## Return the list of Image Definition files\r | |
3093 | @cached_property\r | |
3094 | def IdfFileList(self):\r | |
3095 | return self.FileTypes.get(TAB_IMAGE_FILE,[])\r | |
3096 | \r | |
3097 | ## Return a list of files which can be built from binary\r | |
3098 | #\r | |
3099 | # "Build" binary files are just to copy them to build directory.\r | |
3100 | #\r | |
3101 | # @retval list The list of files which can be built later\r | |
3102 | #\r | |
3103 | @cached_property\r | |
3104 | def BinaryFileList(self):\r | |
3105 | RetVal = []\r | |
3106 | for F in self.Module.Binaries:\r | |
3107 | if F.Target not in [TAB_ARCH_COMMON, '*'] and F.Target != self.BuildTarget:\r | |
3108 | continue\r | |
3109 | RetVal.append(F)\r | |
3110 | self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)\r | |
3111 | return RetVal\r | |
3112 | \r | |
3113 | @cached_property\r | |
3114 | def BuildRules(self):\r | |
3115 | RetVal = {}\r | |
3116 | BuildRuleDatabase = self.PlatformInfo.BuildRule\r | |
3117 | for Type in BuildRuleDatabase.FileTypeList:\r | |
3118 | #first try getting build rule by BuildRuleFamily\r | |
3119 | RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]\r | |
3120 | if not RuleObject:\r | |
3121 | # build type is always module type, but ...\r | |
3122 | if self.ModuleType != self.BuildType:\r | |
3123 | RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]\r | |
3124 | #second try getting build rule by ToolChainFamily\r | |
3125 | if not RuleObject:\r | |
3126 | RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]\r | |
3127 | if not RuleObject:\r | |
3128 | # build type is always module type, but ...\r | |
3129 | if self.ModuleType != self.BuildType:\r | |
3130 | RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]\r | |
3131 | if not RuleObject:\r | |
3132 | continue\r | |
3133 | RuleObject = RuleObject.Instantiate(self.Macros)\r | |
3134 | RetVal[Type] = RuleObject\r | |
3135 | for Ext in RuleObject.SourceFileExtList:\r | |
3136 | RetVal[Ext] = RuleObject\r | |
3137 | return RetVal\r | |
3138 | \r | |
    def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
        """Chain build rules starting from File, recording intermediate and
        final build targets.

        Populates self._BuildTargets / _IntroBuildTargetList /
        _FinalBuildTargetList / _FileTypes as side effects. Rule outputs are
        fed back into the worklist until no rule applies, a rule repeats
        (cycle guard), or a library reaches STATIC_LIBRARY.
        """
        # Lazily initialize the target-tracking state on first use.
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        if not BinaryFileList:
            BinaryFileList = self.BinaryFileList

        SubDirectory = os.path.join(self.OutputDir, File.SubDir)
        if not os.path.exists(SubDirectory):
            CreateDirectory(SubDirectory)
        LastTarget = None
        RuleChain = set()
        # Worklist of files to process; rule outputs are appended below.
        SourceList = [File]
        Index = 0
        #
        # Make sure to get build rule order value
        #
        self.BuildOption

        while Index < len(SourceList):
            Source = SourceList[Index]
            Index = Index + 1

            if Source != File:
                CreateDirectory(Source.Dir)

            if File.IsBinary and File == Source and File in BinaryFileList:
                # Skip all files that are not binary libraries
                if not self.IsLibrary:
                    continue
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            elif FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            else:
                # stop at no more rules
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            FileType = RuleObject.SourceFileType
            self._FileTypes[FileType].add(Source)

            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            Target = RuleObject.Apply(Source, self.BuildRuleOrder)
            if not Target:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break
            elif not Target.Outputs:
                # Only do build for target with outputs
                self._FinalBuildTargetList.add(Target)

            self._BuildTargets[FileType].add(Target)

            if not Source.IsBinary and Source == File:
                # First-stage target produced directly from the original file.
                self._IntroBuildTargetList.add(Target)

            # to avoid cyclic rule
            if FileType in RuleChain:
                break

            RuleChain.add(FileType)
            SourceList.extend(Target.Outputs)
            LastTarget = Target
            # Subsequent iterations classify by extension, not by FileType.
            FileType = TAB_UNKNOWN_FILE
3214 | \r | |
3215 | @cached_property\r | |
3216 | def Targets(self):\r | |
3217 | if self._BuildTargets is None:\r | |
3218 | self._IntroBuildTargetList = set()\r | |
3219 | self._FinalBuildTargetList = set()\r | |
3220 | self._BuildTargets = defaultdict(set)\r | |
3221 | self._FileTypes = defaultdict(set)\r | |
3222 | \r | |
3223 | #TRICK: call SourceFileList property to apply build rule for source files\r | |
3224 | self.SourceFileList\r | |
3225 | \r | |
3226 | #TRICK: call _GetBinaryFileList to apply build rule for binary files\r | |
3227 | self.BinaryFileList\r | |
3228 | \r | |
3229 | return self._BuildTargets\r | |
3230 | \r | |
3231 | @cached_property\r | |
3232 | def IntroTargetList(self):\r | |
3233 | self.Targets\r | |
3234 | return self._IntroBuildTargetList\r | |
3235 | \r | |
3236 | @cached_property\r | |
3237 | def CodaTargetList(self):\r | |
3238 | self.Targets\r | |
3239 | return self._FinalBuildTargetList\r | |
3240 | \r | |
3241 | @cached_property\r | |
3242 | def FileTypes(self):\r | |
3243 | self.Targets\r | |
3244 | return self._FileTypes\r | |
3245 | \r | |
3246 | ## Get the list of package object the module depends on\r | |
3247 | #\r | |
3248 | # @retval list The package object list\r | |
3249 | #\r | |
3250 | @cached_property\r | |
3251 | def DependentPackageList(self):\r | |
3252 | return self.Module.Packages\r | |
3253 | \r | |
3254 | ## Return the list of auto-generated code file\r | |
3255 | #\r | |
3256 | # @retval list The list of auto-generated file\r | |
3257 | #\r | |
3258 | @cached_property\r | |
3259 | def AutoGenFileList(self):\r | |
3260 | AutoGenUniIdf = self.BuildType != 'UEFI_HII'\r | |
3261 | UniStringBinBuffer = BytesIO()\r | |
3262 | IdfGenBinBuffer = BytesIO()\r | |
3263 | RetVal = {}\r | |
3264 | AutoGenC = TemplateString()\r | |
3265 | AutoGenH = TemplateString()\r | |
3266 | StringH = TemplateString()\r | |
3267 | StringIdf = TemplateString()\r | |
3268 | GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)\r | |
3269 | #\r | |
3270 | # AutoGen.c is generated if there are library classes in inf, or there are object files\r | |
3271 | #\r | |
3272 | if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0\r | |
3273 | or TAB_OBJECT_FILE in self.FileTypes):\r | |
3274 | AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)\r | |
3275 | RetVal[AutoFile] = str(AutoGenC)\r | |
3276 | self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r | |
3277 | if str(AutoGenH) != "":\r | |
3278 | AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)\r | |
3279 | RetVal[AutoFile] = str(AutoGenH)\r | |
3280 | self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r | |
3281 | if str(StringH) != "":\r | |
3282 | AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)\r | |
3283 | RetVal[AutoFile] = str(StringH)\r | |
3284 | self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r | |
3285 | if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":\r | |
3286 | AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)\r | |
3287 | RetVal[AutoFile] = UniStringBinBuffer.getvalue()\r | |
3288 | AutoFile.IsBinary = True\r | |
3289 | self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r | |
3290 | if UniStringBinBuffer is not None:\r | |
3291 | UniStringBinBuffer.close()\r | |
3292 | if str(StringIdf) != "":\r | |
3293 | AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)\r | |
3294 | RetVal[AutoFile] = str(StringIdf)\r | |
3295 | self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r | |
3296 | if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":\r | |
3297 | AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)\r | |
3298 | RetVal[AutoFile] = IdfGenBinBuffer.getvalue()\r | |
3299 | AutoFile.IsBinary = True\r | |
3300 | self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r | |
3301 | if IdfGenBinBuffer is not None:\r | |
3302 | IdfGenBinBuffer.close()\r | |
3303 | return RetVal\r | |
3304 | \r | |
3305 | ## Return the list of library modules explicitly or implicityly used by this module\r | |
3306 | @cached_property\r | |
3307 | def DependentLibraryList(self):\r | |
3308 | # only merge library classes and PCD for non-library module\r | |
3309 | if self.IsLibrary:\r | |
3310 | return []\r | |
3311 | if self.AutoGenVersion < 0x00010005:\r | |
3312 | return self.PlatformInfo.ResolveLibraryReference(self.Module)\r | |
3313 | return self.PlatformInfo.ApplyLibraryInstance(self.Module)\r | |
3314 | \r | |
3315 | ## Get the list of PCDs from current module\r | |
3316 | #\r | |
3317 | # @retval list The list of PCD\r | |
3318 | #\r | |
3319 | @cached_property\r | |
3320 | def ModulePcdList(self):\r | |
3321 | # apply PCD settings from platform\r | |
3322 | RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)\r | |
3323 | ExtendCopyDictionaryLists(self._PcdComments, self.Module.PcdComments)\r | |
3324 | return RetVal\r | |
3325 | \r | |
3326 | ## Get the list of PCDs from dependent libraries\r | |
3327 | #\r | |
3328 | # @retval list The list of PCD\r | |
3329 | #\r | |
3330 | @cached_property\r | |
3331 | def LibraryPcdList(self):\r | |
3332 | if self.IsLibrary:\r | |
3333 | return []\r | |
3334 | RetVal = []\r | |
3335 | Pcds = set()\r | |
3336 | # get PCDs from dependent libraries\r | |
3337 | for Library in self.DependentLibraryList:\r | |
3338 | PcdsInLibrary = OrderedDict()\r | |
3339 | ExtendCopyDictionaryLists(self._PcdComments, Library.PcdComments)\r | |
3340 | for Key in Library.Pcds:\r | |
3341 | # skip duplicated PCDs\r | |
3342 | if Key in self.Module.Pcds or Key in Pcds:\r | |
3343 | continue\r | |
3344 | Pcds.add(Key)\r | |
3345 | PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])\r | |
3346 | RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))\r | |
3347 | return RetVal\r | |
3348 | \r | |
3349 | ## Get the GUID value mapping\r | |
3350 | #\r | |
3351 | # @retval dict The mapping between GUID cname and its value\r | |
3352 | #\r | |
3353 | @cached_property\r | |
3354 | def GuidList(self):\r | |
3355 | RetVal = OrderedDict(self.Module.Guids)\r | |
3356 | for Library in self.DependentLibraryList:\r | |
3357 | RetVal.update(Library.Guids)\r | |
3358 | ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)\r | |
3359 | ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)\r | |
3360 | return RetVal\r | |
3361 | \r | |
3362 | @cached_property\r | |
3363 | def GetGuidsUsedByPcd(self):\r | |
3364 | RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())\r | |
3365 | for Library in self.DependentLibraryList:\r | |
3366 | RetVal.update(Library.GetGuidsUsedByPcd())\r | |
3367 | return RetVal\r | |
3368 | ## Get the protocol value mapping\r | |
3369 | #\r | |
3370 | # @retval dict The mapping between protocol cname and its value\r | |
3371 | #\r | |
3372 | @cached_property\r | |
3373 | def ProtocolList(self):\r | |
3374 | RetVal = OrderedDict(self.Module.Protocols)\r | |
3375 | for Library in self.DependentLibraryList:\r | |
3376 | RetVal.update(Library.Protocols)\r | |
3377 | ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)\r | |
3378 | ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)\r | |
3379 | return RetVal\r | |
3380 | \r | |
3381 | ## Get the PPI value mapping\r | |
3382 | #\r | |
3383 | # @retval dict The mapping between PPI cname and its value\r | |
3384 | #\r | |
3385 | @cached_property\r | |
3386 | def PpiList(self):\r | |
3387 | RetVal = OrderedDict(self.Module.Ppis)\r | |
3388 | for Library in self.DependentLibraryList:\r | |
3389 | RetVal.update(Library.Ppis)\r | |
3390 | ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)\r | |
3391 | ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)\r | |
3392 | return RetVal\r | |
3393 | \r | |
3394 | ## Get the list of include search path\r | |
3395 | #\r | |
3396 | # @retval list The list path\r | |
3397 | #\r | |
3398 | @cached_property\r | |
3399 | def IncludePathList(self):\r | |
3400 | RetVal = []\r | |
3401 | if self.AutoGenVersion < 0x00010005:\r | |
3402 | for Inc in self.Module.Includes:\r | |
3403 | if Inc not in RetVal:\r | |
3404 | RetVal.append(Inc)\r | |
3405 | # for Edk modules\r | |
3406 | Inc = path.join(Inc, self.Arch.capitalize())\r | |
3407 | if os.path.exists(Inc) and Inc not in RetVal:\r | |
3408 | RetVal.append(Inc)\r | |
3409 | # Edk module needs to put DEBUG_DIR at the end of search path and not to use SOURCE_DIR all the time\r | |
3410 | RetVal.append(self.DebugDir)\r | |
3411 | else:\r | |
3412 | RetVal.append(self.MetaFile.Dir)\r | |
3413 | RetVal.append(self.DebugDir)\r | |
3414 | \r | |
3415 | for Package in self.Module.Packages:\r | |
3416 | PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r | |
3417 | if PackageDir not in RetVal:\r | |
3418 | RetVal.append(PackageDir)\r | |
3419 | IncludesList = Package.Includes\r | |
3420 | if Package._PrivateIncludes:\r | |
3421 | if not self.MetaFile.Path.startswith(PackageDir):\r | |
3422 | IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))\r | |
3423 | for Inc in IncludesList:\r | |
3424 | if Inc not in RetVal:\r | |
3425 | RetVal.append(str(Inc))\r | |
3426 | return RetVal\r | |
3427 | \r | |
3428 | @cached_property\r | |
3429 | def IncludePathLength(self):\r | |
3430 | return sum(len(inc)+1 for inc in self.IncludePathList)\r | |
3431 | \r | |
    ## Get HII EX PCDs which maybe used by VFR
    #
    #  efivarstore used by VFR may relate with HII EX PCDs
    #  Get the variable name and GUID from efivarstore and HII EX PCD
    #  List the HII EX PCDs in As Built INF if both name and GUID match.
    #
    #  @retval list HII EX PCDs
    #
    def _GetPcdsMaybeUsedByVfr(self):
        if not self.SourceFileList:
            return []

        # Collect (variable-name byte array, GUID string) pairs from each
        # preprocessed VFR output file ("<BaseName>.i") in the output dir.
        NameGuids = set()
        for SrcFile in self.SourceFileList:
            if SrcFile.Ext.lower() != '.vfr':
                continue
            Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
            if not os.path.exists(Vfri):
                # Preprocessed output not present; nothing to scan for this file.
                continue
            VfriFile = open(Vfri, 'r')
            Content = VfriFile.read()
            VfriFile.close()
            Pos = Content.find('efivarstore')
            while Pos != -1:
                #
                # Make sure 'efivarstore' is the start of efivarstore statement
                # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
                #
                Index = Pos - 1
                # Walk back over whitespace to the previous significant character.
                while Index >= 0 and Content[Index] in ' \t\r\n':
                    Index -= 1
                if Index >= 0 and Content[Index] != ';':
                    # Not preceded by a statement terminator: this occurrence is
                    # a value, not the keyword - keep scanning past it.
                    Pos = Content.find('efivarstore', Pos + len('efivarstore'))
                    continue
                #
                # 'efivarstore' must be followed by name and guid
                #
                Name = gEfiVarStoreNamePattern.search(Content, Pos)
                if not Name:
                    break
                Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
                if not Guid:
                    break
                # Store the name in the same byte-array form used for the PCD
                # comparison below, so the tuples compare directly.
                NameArray = ConvertStringToByteArray('L"' + Name.group(1) + '"')
                NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
                Pos = Content.find('efivarstore', Name.end())
        if not NameGuids:
            return []
        # Match the collected (name, GUID) pairs against the platform's
        # DynamicExHii PCDs; a PCD qualifies when any of its SKUs' variable
        # name/GUID matches one seen in a VFR efivarstore.
        HiiExPcds = []
        for Pcd in self.PlatformInfo.Platform.Pcds.values():
            if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
                continue
            for SkuInfo in Pcd.SkuInfoList.values():
                Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
                if not Value:
                    # GUID cname could not be resolved against the packages.
                    continue
                Name = ConvertStringToByteArray(SkuInfo.VariableName)
                Guid = GuidStructureStringToGuidString(Value)
                if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
                    HiiExPcds.append(Pcd)
                    break

        return HiiExPcds
3495 | \r | |
3496 | def _GenOffsetBin(self):\r | |
3497 | VfrUniBaseName = {}\r | |
3498 | for SourceFile in self.Module.Sources:\r | |
3499 | if SourceFile.Type.upper() == ".VFR" :\r | |
3500 | #\r | |
3501 | # search the .map file to find the offset of vfr binary in the PE32+/TE file.\r | |
3502 | #\r | |
3503 | VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")\r | |
3504 | elif SourceFile.Type.upper() == ".UNI" :\r | |
3505 | #\r | |
3506 | # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.\r | |
3507 | #\r | |
3508 | VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")\r | |
3509 | \r | |
3510 | if not VfrUniBaseName:\r | |
3511 | return None\r | |
3512 | MapFileName = os.path.join(self.OutputDir, self.Name + ".map")\r | |
3513 | EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")\r | |
3514 | VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrUniBaseName.values())\r | |
3515 | if not VfrUniOffsetList:\r | |
3516 | return None\r | |
3517 | \r | |
3518 | OutputName = '%sOffset.bin' % self.Name\r | |
3519 | UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)\r | |
3520 | \r | |
3521 | try:\r | |
3522 | fInputfile = open(UniVfrOffsetFileName, "wb+", 0)\r | |
3523 | except:\r | |
3524 | EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)\r | |
3525 | \r | |
3526 | # Use a instance of BytesIO to cache data\r | |
3527 | fStringIO = BytesIO('')\r | |
3528 | \r | |
3529 | for Item in VfrUniOffsetList:\r | |
3530 | if (Item[0].find("Strings") != -1):\r | |
3531 | #\r | |
3532 | # UNI offset in image.\r | |
3533 | # GUID + Offset\r | |
3534 | # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r | |
3535 | #\r | |
3536 | UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]\r | |
3537 | UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]\r | |
3538 | fStringIO.write(''.join(UniGuid))\r | |
3539 | UniValue = pack ('Q', int (Item[1], 16))\r | |
3540 | fStringIO.write (UniValue)\r | |
3541 | else:\r | |
3542 | #\r | |
3543 | # VFR binary offset in image.\r | |
3544 | # GUID + Offset\r | |
3545 | # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r | |
3546 | #\r | |
3547 | VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]\r | |
3548 | VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]\r | |
3549 | fStringIO.write(''.join(VfrGuid))\r | |
3550 | VfrValue = pack ('Q', int (Item[1], 16))\r | |
3551 | fStringIO.write (VfrValue)\r | |
3552 | #\r | |
3553 | # write data into file.\r | |
3554 | #\r | |
3555 | try :\r | |
3556 | fInputfile.write (fStringIO.getvalue())\r | |
3557 | except:\r | |
3558 | EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "\r | |
3559 | "file been locked or using by other applications." %UniVfrOffsetFileName, None)\r | |
3560 | \r | |
3561 | fStringIO.close ()\r | |
3562 | fInputfile.close ()\r | |
3563 | return OutputName\r | |
3564 | \r | |
    ## Create AsBuilt INF file the module
    #
    #  Generates "<Name>.inf" in the output directory describing what was
    #  actually built (binaries, packages, patchable/dynamic-ex PCDs,
    #  protocols, PPIs, GUIDs, build flags), and records every produced file
    #  in self.OutputFile for binary-cache copying.
    #
    # @param IsOnlyCopy When True, skip generation and only copy the module's
    #                   outputs into the binary cache (if one is configured).
    #
    def CreateAsBuiltInf(self, IsOnlyCopy = False):
        self.OutputFile = set()
        if IsOnlyCopy and GlobalData.gBinCacheDest:
            self.CopyModuleToCache()
            return

        # Generate at most once per module instance.
        if self.IsAsBuiltInfCreated:
            return

        # Skip the following code for EDK I inf
        if self.AutoGenVersion < 0x00010005:
            return

        # Skip the following code for libraries
        if self.IsLibrary:
            return

        # Skip the following code for modules with no source files
        if not self.SourceFileList:
            return

        # Skip the following code for modules without any binary files
        if self.BinaryFileList:
            return

        ### TODO: How to handles mixed source and binary modules

        # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
        # Also find all packages that the DynamicEx PCDs depend on
        Pcds = []
        PatchablePcds = []
        Packages = []
        PcdCheckList = []
        PcdTokenSpaceList = []
        for Pcd in self.ModulePcdList + self.LibraryPcdList:
            if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
                PatchablePcds.append(Pcd)
                PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
            elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
                if Pcd not in Pcds:
                    Pcds.append(Pcd)
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
                    PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
        GuidList = OrderedDict(self.GuidList)
        for TokenSpace in self.GetGuidsUsedByPcd:
            # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
            # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
            if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
                GuidList.pop(TokenSpace)
        # Keep only the packages that declare something this module references.
        CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
        for Package in self.DerivedPackageList:
            if Package in Packages:
                continue
            BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
            Found = False
            for Index in range(len(BeChecked)):
                for Item in CheckList[Index]:
                    if Item in BeChecked[Index]:
                        Packages.append(Package)
                        Found = True
                        break
                if Found:
                    break

        # Packages declaring HII EX PCDs referenced via VFR efivarstore must
        # also be listed, even if nothing else references them.
        VfrPcds = self._GetPcdsMaybeUsedByVfr()
        for Pkg in self.PlatformInfo.PackageList:
            if Pkg in Packages:
                continue
            for VfrPcd in VfrPcds:
                if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
                    (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
                    Packages.append(Pkg)
                    break

        # A UEFI_DRIVER with a generated depex is reported as DXE_DRIVER in
        # the as-built INF.
        ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
        DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
        Guid = self.Guid
        MDefs = self.Module.Defines

        # Substitution dictionary for the gAsBuiltInfHeaderString template.
        AsBuiltInfDict = {
            'module_name'                       : self.Name,
            'module_guid'                       : Guid,
            'module_module_type'                : ModuleType,
            'module_version_string'             : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
            'pcd_is_driver_string'              : [],
            'module_uefi_specification_version' : [],
            'module_pi_specification_version'   : [],
            'module_entry_point'                : self.Module.ModuleEntryPointList,
            'module_unload_image'               : self.Module.ModuleUnloadImageList,
            'module_constructor'                : self.Module.ConstructorList,
            'module_destructor'                 : self.Module.DestructorList,
            'module_shadow'                     : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
            'module_pci_vendor_id'              : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
            'module_pci_device_id'              : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
            'module_pci_class_code'             : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
            'module_pci_revision'               : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
            'module_build_number'               : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
            'module_spec'                       : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
            'module_uefi_hii_resource_section'  : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
            'module_uni_file'                   : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
            'module_arch'                       : self.Arch,
            'package_item'                      : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
            'binary_item'                       : [],
            'patchablepcd_item'                 : [],
            'pcd_item'                          : [],
            'protocol_item'                     : [],
            'ppi_item'                          : [],
            'guid_item'                         : [],
            'flags_item'                        : [],
            'libraryclasses_item'               : []
            }

        # Ship the module's UNI file alongside the as-built INF if declared.
        if 'MODULE_UNI_FILE' in MDefs:
            UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
            if os.path.isfile(UNIFile):
                shutil.copy2(UNIFile, self.OutputDir)

        if self.AutoGenVersion > int(gInfSpecVersion, 0):
            AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
        else:
            AsBuiltInfDict['module_inf_version'] = gInfSpecVersion

        if DriverType:
            AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)

        if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
        if 'PI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])

        # Record each final build target as a [Binaries] entry, with paths
        # made relative to the output/debug directories.
        OutputDir = self.OutputDir.replace('\\', '/').strip('/')
        DebugDir = self.DebugDir.replace('\\', '/').strip('/')
        for Item in self.CodaTargetList:
            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
            self.OutputFile.add(File)
            if os.path.isabs(File):
                File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
            if Item.Target.Ext.lower() == '.aml':
                AsBuiltInfDict['binary_item'].append('ASL|' + File)
            elif Item.Target.Ext.lower() == '.acpi':
                AsBuiltInfDict['binary_item'].append('ACPI|' + File)
            elif Item.Target.Ext.lower() == '.efi':
                AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
            else:
                AsBuiltInfDict['binary_item'].append('BIN|' + File)
        if self.DepexGenerated:
            self.OutputFile.add(self.Name + '.depex')
            if self.ModuleType in [SUP_MODULE_PEIM]:
                AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
                AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
                AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')

        Bin = self._GenOffsetBin()
        if Bin:
            AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)
            self.OutputFile.add(Bin)

        # PDB debug files are listed as DISPOSABLE binaries.
        for Root, Dirs, Files in os.walk(OutputDir):
            for File in Files:
                if File.lower().endswith('.pdb'):
                    AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
                    self.OutputFile.add(File)
        # Header comments start from @BinaryHeader if present (rewritten to
        # @file); ':#' is unescaped back to '://' for URLs.
        HeaderComments = self.Module.HeaderComments
        StartPos = 0
        for Index in range(len(HeaderComments)):
            if HeaderComments[Index].find('@BinaryHeader') != -1:
                HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
                StartPos = Index
                break
        AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
        AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)

        # Emit protocols/PPIs/GUIDs, each prefixed by its usage comments.
        GenList = [
            (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
            (self.PpiList, self._PpiComments, 'ppi_item'),
            (GuidList, self._GuidComments, 'guid_item')
        ]
        for Item in GenList:
            for CName in Item[0]:
                Comments = '\n  '.join(Item[1][CName]) if CName in Item[1] else ''
                Entry = Comments + '\n  ' + CName if Comments else CName
                AsBuiltInfDict[Item[2]].append(Entry)
        # Patchable PCDs: recover each one's offset in the image from the map
        # file and emit "TokenSpace.Token|Value|Offset".
        PatchList = parsePcdInfoFromMapFile(
                            os.path.join(self.OutputDir, self.Name + '.map'),
                            os.path.join(self.OutputDir, self.Name + '.efi')
                        )
        if PatchList:
            for Pcd in PatchablePcds:
                TokenCName = Pcd.TokenCName
                for PcdItem in GlobalData.MixedPcd:
                    if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                        TokenCName = PcdItem[0]
                        break
                for PatchPcd in PatchList:
                    if TokenCName == PatchPcd[0]:
                        break
                else:
                    # Not present in the map file; skip this PCD.
                    continue
                PcdValue = ''
                if Pcd.DatumType == 'BOOLEAN':
                    BoolValue = Pcd.DefaultValue.upper()
                    if BoolValue == 'TRUE':
                        Pcd.DefaultValue = '1'
                    elif BoolValue == 'FALSE':
                        Pcd.DefaultValue = '0'

                if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
                    HexFormat = '0x%02x'
                    if Pcd.DatumType == TAB_UINT16:
                        HexFormat = '0x%04x'
                    elif Pcd.DatumType == TAB_UINT32:
                        HexFormat = '0x%08x'
                    elif Pcd.DatumType == TAB_UINT64:
                        HexFormat = '0x%016x'
                    PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
                else:
                    # VOID* PCD: render the default value as a padded byte array
                    # of MaxDatumSize bytes.
                    if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
                        EdkLogger.error("build", AUTOGEN_ERROR,
                                        "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
                                        )
                    ArraySize = int(Pcd.MaxDatumSize, 0)
                    PcdValue = Pcd.DefaultValue
                    if PcdValue[0] != '{':
                        # String default ("..." or L"..."); expand to bytes.
                        Unicode = False
                        if PcdValue[0] == 'L':
                            Unicode = True
                        PcdValue = PcdValue.lstrip('L')
                        # NOTE(review): eval on the PCD default string - input
                        # comes from build metadata, but still worth confirming
                        # it is never attacker-controlled.
                        PcdValue = eval(PcdValue)
                        NewValue = '{'
                        for Index in range(0, len(PcdValue)):
                            if Unicode:
                                CharVal = ord(PcdValue[Index])
                                NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
                                        + '0x%02x' % (CharVal >> 8) + ', '
                            else:
                                NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
                        Padding = '0x00, '
                        if Unicode:
                            Padding = Padding * 2
                            # NOTE(review): '/' is true division on Python 3 and
                            # yields a float here; '//' looks intended - confirm.
                            ArraySize = ArraySize / 2
                        if ArraySize < (len(PcdValue) + 1):
                            if Pcd.MaxSizeUserSet:
                                EdkLogger.error("build", AUTOGEN_ERROR,
                                                "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                                )
                            else:
                                ArraySize = len(PcdValue) + 1
                        if ArraySize > len(PcdValue) + 1:
                            NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
                        PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
                    elif len(PcdValue.split(',')) <= ArraySize:
                        # Byte-array default; pad with zero bytes up to ArraySize.
                        PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
                        PcdValue += '}'
                    else:
                        if Pcd.MaxSizeUserSet:
                            EdkLogger.error("build", AUTOGEN_ERROR,
                                            "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                            )
                        else:
                            ArraySize = len(PcdValue) + 1
                PcdItem = '%s.%s|%s|0x%X' % \
                    (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
                PcdComments = ''
                if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                    PcdComments = '\n  '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
                if PcdComments:
                    PcdItem = PcdComments + '\n  ' + PcdItem
                AsBuiltInfDict['patchablepcd_item'].append(PcdItem)

        # DynamicEx (and VFR-referenced HII) PCDs; HII PCDs get a
        # "## <usage> <name>|<guid>|<offset>" comment line.
        for Pcd in Pcds + VfrPcds:
            PcdCommentList = []
            HiiInfo = ''
            TokenCName = Pcd.TokenCName
            for PcdItem in GlobalData.MixedPcd:
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                    TokenCName = PcdItem[0]
                    break
            if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
                for SkuName in Pcd.SkuInfoList:
                    SkuInfo = Pcd.SkuInfoList[SkuName]
                    HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
                    break
            if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
            if HiiInfo:
                UsageIndex = -1
                UsageStr = ''
                for Index, Comment in enumerate(PcdCommentList):
                    for Usage in UsageList:
                        if Comment.find(Usage) != -1:
                            UsageStr = Usage
                            UsageIndex = Index
                            break
                if UsageIndex != -1:
                    PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
                else:
                    PcdCommentList.append('## UNDEFINED ' + HiiInfo)
            PcdComments = '\n  '.join(PcdCommentList)
            PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
            if PcdComments:
                PcdEntry = PcdComments + '\n  ' + PcdEntry
            AsBuiltInfDict['pcd_item'].append(PcdEntry)
        for Item in self.BuildOption:
            if 'FLAGS' in self.BuildOption[Item]:
                AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))

        # Generated LibraryClasses section in comments.
        for Library in self.LibraryAutoGenList:
            AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))

        # Generated UserExtensions TianoCore section.
        # All tianocore user extensions are copied.
        UserExtStr = ''
        for TianoCore in self._GetTianoCoreUserExtensionList():
            UserExtStr += '\n'.join(TianoCore)
            ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
            if os.path.isfile(ExtensionFile):
                shutil.copy2(ExtensionFile, self.OutputDir)
        AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr

        # Generated depex expression section in comments.
        DepexExpresion = self._GetDepexExpresionString()
        AsBuiltInfDict['depexsection_item'] = DepexExpresion if DepexExpresion else ''

        AsBuiltInf = TemplateString()
        AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))

        # Only rewrites the file when its content actually changed.
        SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)

        self.IsAsBuiltInfCreated = True
        if GlobalData.gBinCacheDest:
            self.CopyModuleToCache()
3902 | \r | |
3903 | def CopyModuleToCache(self):\r | |
3904 | FileDir = path.join(GlobalData.gBinCacheDest, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r | |
3905 | CreateDirectory (FileDir)\r | |
3906 | HashFile = path.join(self.BuildDir, self.Name + '.hash')\r | |
3907 | ModuleFile = path.join(self.OutputDir, self.Name + '.inf')\r | |
3908 | if os.path.exists(HashFile):\r | |
3909 | shutil.copy2(HashFile, FileDir)\r | |
3910 | if os.path.exists(ModuleFile):\r | |
3911 | shutil.copy2(ModuleFile, FileDir)\r | |
3912 | if not self.OutputFile:\r | |
3913 | Ma = self.Workspace.BuildDatabase[PathClass(ModuleFile), self.Arch, self.BuildTarget, self.ToolChain]\r | |
3914 | self.OutputFile = Ma.Binaries\r | |
3915 | if self.OutputFile:\r | |
3916 | for File in self.OutputFile:\r | |
3917 | File = str(File)\r | |
3918 | if not os.path.isabs(File):\r | |
3919 | File = os.path.join(self.OutputDir, File)\r | |
3920 | if os.path.exists(File):\r | |
3921 | shutil.copy2(File, FileDir)\r | |
3922 | \r | |
    ## Try to restore this module's outputs from the binary cache source.
    #
    #  Compares the cached hash against the freshly computed module hash and,
    #  on a match, copies every cached file back into the build tree.
    #
    # @retval True  Cache hit: files were restored from the cache
    # @retval False Binary module, no cached hash, or hash mismatch
    #
    def AttemptModuleCacheCopy(self):
        # Binary modules carry their own binaries; nothing to restore.
        if self.IsBinaryModule:
            return False
        FileDir = path.join(GlobalData.gBinCacheSource, self.Arch, self.SourceDir, self.MetaFile.BaseName)
        HashFile = path.join(FileDir, self.Name + '.hash')
        if os.path.exists(HashFile):
            f = open(HashFile, 'r')
            CacheHash = f.read()
            f.close()
            if GlobalData.gModuleHash[self.Arch][self.Name]:
                if CacheHash == GlobalData.gModuleHash[self.Arch][self.Name]:
                    # Hash matches: the hash file goes to BuildDir, everything
                    # else in the cache directory goes to OutputDir.
                    # NOTE(review): the substring test ('<Name>.hash' in f) also
                    # matches e.g. 'Foo<Name>.hash' - presumably only exact
                    # names occur in the cache; confirm.
                    for root, dir, files in os.walk(FileDir):
                        for f in files:
                            if self.Name + '.hash' in f:
                                shutil.copy2(HashFile, self.BuildDir)
                            else:
                                File = path.join(root, f)
                                shutil.copy2(File, self.OutputDir)
                    # The PCD driver modules still need their PCD database
                    # code regenerated even on a cache hit.
                    if self.Name == "PcdPeim" or self.Name == "PcdDxe":
                        CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
                    return True
        return False
3945 | \r | |
3946 | ## Create makefile for the module and its dependent libraries\r | |
3947 | #\r | |
3948 | # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of\r | |
3949 | # dependent libraries will be created\r | |
3950 | #\r | |
3951 | @cached_class_function\r | |
3952 | def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r | |
3953 | # nest this function inside it's only caller.\r | |
3954 | def CreateTimeStamp():\r | |
3955 | FileSet = {self.MetaFile.Path}\r | |
3956 | \r | |
3957 | for SourceFile in self.Module.Sources:\r | |
3958 | FileSet.add (SourceFile.Path)\r | |
3959 | \r | |
3960 | for Lib in self.DependentLibraryList:\r | |
3961 | FileSet.add (Lib.MetaFile.Path)\r | |
3962 | \r | |
3963 | for f in self.AutoGenDepSet:\r | |
3964 | FileSet.add (f.Path)\r | |
3965 | \r | |
3966 | if os.path.exists (self.TimeStampPath):\r | |
3967 | os.remove (self.TimeStampPath)\r | |
3968 | with open(self.TimeStampPath, 'w+') as file:\r | |
3969 | for f in FileSet:\r | |
3970 | print(f, file=file)\r | |
3971 | \r | |
3972 | # Ignore generating makefile when it is a binary module\r | |
3973 | if self.IsBinaryModule:\r | |
3974 | return\r | |
3975 | \r | |
3976 | self.GenFfsList = GenFfsList\r | |
3977 | if not self.IsLibrary and CreateLibraryMakeFile:\r | |
3978 | for LibraryAutoGen in self.LibraryAutoGenList:\r | |
3979 | LibraryAutoGen.CreateMakeFile()\r | |
3980 | \r | |
3981 | if self.CanSkip():\r | |
3982 | return\r | |
3983 | \r | |
3984 | if len(self.CustomMakefile) == 0:\r | |
3985 | Makefile = GenMake.ModuleMakefile(self)\r | |
3986 | else:\r | |
3987 | Makefile = GenMake.CustomMakefile(self)\r | |
3988 | if Makefile.Generate():\r | |
3989 | EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %\r | |
3990 | (self.Name, self.Arch))\r | |
3991 | else:\r | |
3992 | EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %\r | |
3993 | (self.Name, self.Arch))\r | |
3994 | \r | |
3995 | CreateTimeStamp()\r | |
3996 | \r | |
3997 | def CopyBinaryFiles(self):\r | |
3998 | for File in self.Module.Binaries:\r | |
3999 | SrcPath = File.Path\r | |
4000 | DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))\r | |
4001 | CopyLongFilePath(SrcPath, DstPath)\r | |
    ## Create autogen code for the module and its dependent libraries
    #
    # @param CreateLibraryCodeFile Flag indicating if or not the code of
    #                              dependent libraries will be created
    #
    # @retval list Generated AutoGen/depex file names, or None on the early
    #              exits below
    #
    def CreateCodeFile(self, CreateLibraryCodeFile=True):
        # Generate at most once per module instance.
        if self.IsCodeFileCreated:
            return

        # Need to generate PcdDatabase even PcdDriver is binarymodule
        if self.IsBinaryModule and self.PcdIsDriver != '':
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
            return
        if self.IsBinaryModule:
            # Binary library: just stage its binaries, no code generation.
            if self.IsLibrary:
                self.CopyBinaryFiles()
            return

        # Dependent library code is generated first.
        if not self.IsLibrary and CreateLibraryCodeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateCodeFile()

        # Hash-based incremental build: nothing to do when unchanged.
        if self.CanSkip():
            return

        AutoGenList = []
        IgoredAutoGenList = []

        for File in self.AutoGenFileList:
            # GenC.Generate returns truthy only when the file content changed
            # and was written to disk.
            if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
                #Ignore Edk AutoGen.c
                if self.AutoGenVersion < 0x00010005 and File.Name == 'AutoGen.c':
                    continue

                AutoGenList.append(str(File))
            else:
                IgoredAutoGenList.append(str(File))

        # Skip the following code for EDK I inf
        if self.AutoGenVersion < 0x00010005:
            return

        # Generate one *.depex file per module type with a non-empty depex.
        for ModuleType in self.DepexList:
            # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
            if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED:
                continue

            Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
            DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}

            # A non-empty postfix expression means a real depex was produced.
            if len(Dpx.PostfixNotation) != 0:
                self.DepexGenerated = True

            if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
                AutoGenList.append(str(DpxFile))
            else:
                IgoredAutoGenList.append(str(DpxFile))

        # Log what was generated vs. skipped (already up to date on disk).
        if IgoredAutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
                            (" ".join(AutoGenList), self.Name, self.Arch))
        elif AutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
                            (" ".join(IgoredAutoGenList), self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
                            (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))

        self.IsCodeFileCreated = True
        return AutoGenList
4072 | \r | |
## Summarize the ModuleAutoGen objects of all libraries used by this module
@cached_property
def LibraryAutoGenList(self):
    # Build one ModuleAutoGen per dependent library, de-duplicated while
    # preserving discovery order.
    LibraryList = []
    for DepLib in self.DependentLibraryList:
        LibAutoGen = ModuleAutoGen(
            self.Workspace,
            DepLib.MetaFile,
            self.BuildTarget,
            self.ToolChain,
            self.Arch,
            self.PlatformInfo.MetaFile
        )
        if LibAutoGen in LibraryList:
            continue
        LibraryList.append(LibAutoGen)
        # Register each new library's final build targets with this module's
        # build rules so they are tracked as build dependencies.
        for CodaTarget in LibAutoGen.CodaTargetList:
            self._ApplyBuildRule(CodaTarget.Target, TAB_UNKNOWN_FILE)
    return LibraryList
4091 | \r | |
def GenModuleHash(self):
    ## Compute an MD5 digest covering this module and everything it depends
    #  on (platform hash, package hashes, library hashes, the module INF and
    #  all of its source files), record it in GlobalData.gModuleHash and
    #  persist it to <BuildDir>/<Name>.hash.
    #
    #  Returns False when a binary-cache copy was performed instead of a
    #  build; otherwise returns the result of SaveFileOnChange.
    #
    if self.Arch not in GlobalData.gModuleHash:
        GlobalData.gModuleHash[self.Arch] = {}
    m = hashlib.md5()
    # Add Platform level hash first, so any platform change invalidates
    # every module hash derived from it.
    m.update(GlobalData.gPlatformHash)
    # Add Package level hash, iterating in sorted order so the digest is
    # deterministic across runs.
    if self.DependentPackageList:
        for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
            if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:
                m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName])

    # Add Library hash; generate any library hash that is not cached yet so
    # the lookup below cannot fail.
    if self.LibraryAutoGenList:
        for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
            if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
                Lib.GenModuleHash()
            m.update(GlobalData.gModuleHash[self.Arch][Lib.Name])

    # Add Module self. Use a context manager so the handle is closed even
    # if reading raises (the original leaked the handle on error).
    with open(str(self.MetaFile), 'r') as f:
        m.update(f.read())
    # Add Module's source files, in sorted order for determinism.
    if self.SourceFileList:
        for File in sorted(self.SourceFileList, key=lambda x: str(x)):
            with open(str(File), 'r') as f:
                m.update(f.read())

    ModuleHashFile = path.join(self.BuildDir, self.Name + ".hash")
    if self.Name not in GlobalData.gModuleHash[self.Arch]:
        GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()
    if GlobalData.gBinCacheSource:
        if self.AttemptModuleCacheCopy():
            return False
    return SaveFileOnChange(ModuleHashFile, m.hexdigest(), True)
4131 | \r | |
## Decide whether we can skip the ModuleAutoGen process
def CanSkipbyHash(self):
    # Hash-based skipping only applies when the hash cache is enabled.
    if not GlobalData.gUseHashCache:
        return False
    # A falsy GenModuleHash() result means the cached output is reusable.
    return not self.GenModuleHash()
4137 | \r | |
## Decide whether we can skip the ModuleAutoGen process
#  If any source file is newer than the module then we cannot skip
#
def CanSkip(self):
    # Already validated for this makefile directory during this build run.
    if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
        return True
    # No timestamp file means AutoGen never completed for this module.
    if not os.path.exists(self.TimeStampPath):
        return False
    # mtime of the last AutoGen run (os.stat index 8 == st_mtime).
    DstTimeStamp = os.stat(self.TimeStampPath)[8]

    if self.Workspace._SrcTimeStamp > DstTimeStamp:
        return False

    # Every file recorded in the timestamp list must still exist and must
    # not be newer than the last AutoGen run.
    with open(self.TimeStampPath, 'r') as TimeStampFile:
        for SourcePath in TimeStampFile:
            SourcePath = SourcePath.rstrip('\n')
            if not os.path.exists(SourcePath):
                return False
            # Cache stat results across modules in the class-level TimeDict.
            SrcMtime = ModuleAutoGen.TimeDict.get(SourcePath)
            if SrcMtime is None:
                SrcMtime = os.stat(SourcePath)[8]
                ModuleAutoGen.TimeDict[SourcePath] = SrcMtime
            if SrcMtime > DstTimeStamp:
                return False
    GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
    return True
4164 | \r | |
## Full path of the file recording the time of the last AutoGen run
@cached_property
def TimeStampPath(self):
    # The timestamp file lives alongside the generated makefile.
    return path.join(self.MakeFileDir, 'AutoGenTimeStamp')