]>
Commit | Line | Data |
---|---|---|
e8449e1d FB |
1 | ## @file\r |
2 | # Create makefile for MS nmake and GNU make\r | |
3 | #\r | |
4 | # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>\r | |
5 | # SPDX-License-Identifier: BSD-2-Clause-Patent\r | |
6 | #\r | |
7 | from __future__ import absolute_import\r | |
8 | from AutoGen.AutoGen import AutoGen\r | |
9 | from Common.LongFilePathSupport import CopyLongFilePath\r | |
10 | from Common.BuildToolError import *\r | |
11 | from Common.DataType import *\r | |
12 | from Common.Misc import *\r | |
13 | from Common.StringUtils import NormPath,GetSplitList\r | |
14 | from collections import defaultdict\r | |
15 | from Workspace.WorkspaceCommon import OrderedListDict\r | |
16 | import os.path as path\r | |
17 | import copy\r | |
18 | import hashlib\r | |
19 | from . import InfSectionParser\r | |
20 | from . import GenC\r | |
21 | from . import GenMake\r | |
22 | from . import GenDepex\r | |
23 | from io import BytesIO\r | |
24 | from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile\r | |
25 | from Workspace.MetaFileCommentParser import UsageList\r | |
26 | from .GenPcdDb import CreatePcdDatabaseCode\r | |
27 | from Common.caching import cached_class_function\r | |
28 | from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r | |
29 | \r | |
## Mapping Makefile type
gMakeTypeMap = {TAB_COMPILER_MSFT: "nmake", "GCC": "gmake"}
#
# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
# is the former use /I , the Latter used -I to specify include directories
#
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

## default file name for AutoGen
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
gInfSpecVersion = "0x00010017"

#
# Match name = variable
#
# Raw strings below so that \s and \w reach the regex engine verbatim instead
# of being parsed as (invalid) string escape sequences, which raises a
# DeprecationWarning/SyntaxWarning on modern Python.
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
#
# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
#
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
58 | \r | |
#
# Template string used to generate the As-Built INF file that accompanies a
# module's binary output; ${...} placeholders and ${BEGIN}/${END} repeat
# markers are expanded by TemplateString at generation time.
#
gAsBuiltInfHeaderString = TemplateString("""${header_comments}

# DO NOT EDIT
# FILE auto-generated

[Defines]
INF_VERSION = ${module_inf_version}
BASE_NAME = ${module_name}
FILE_GUID = ${module_guid}
MODULE_TYPE = ${module_module_type}${BEGIN}
VERSION_STRING = ${module_version_string}${END}${BEGIN}
PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
DESTRUCTOR = ${module_destructor}${END}${BEGIN}
SHADOW = ${module_shadow}${END}${BEGIN}
PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
SPEC = ${module_spec}${END}${BEGIN}
UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
MODULE_UNI_FILE = ${module_uni_file}${END}

[Packages.${module_arch}]${BEGIN}
${package_item}${END}

[Binaries.${module_arch}]${BEGIN}
${binary_item}${END}

[PatchPcd.${module_arch}]${BEGIN}
${patchablepcd_item}
${END}

[Protocols.${module_arch}]${BEGIN}
${protocol_item}
${END}

[Ppis.${module_arch}]${BEGIN}
${ppi_item}
${END}

[Guids.${module_arch}]${BEGIN}
${guid_item}
${END}

[PcdEx.${module_arch}]${BEGIN}
${pcd_item}
${END}

[LibraryClasses.${module_arch}]
## @LIB_INSTANCES${BEGIN}
# ${libraryclasses_item}${END}

${depexsection_item}

${userextension_tianocore_item}

${tail_comments}

[BuildOptions.${module_arch}]
## @AsBuilt${BEGIN}
## ${flags_item}${END}
""")
#
# extend lists contained in a dictionary with lists stored in another dictionary
# if CopyToDict is not derived from DefaultDict(list) then this may raise exception
#
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    """Append every list in CopyFromDict onto the matching list in CopyToDict.

    CopyToDict is expected to behave like defaultdict(list): looking up a
    missing key must create an empty list, otherwise KeyError may be raised.
    """
    for Key, Values in CopyFromDict.items():
        CopyToDict[Key].extend(Values)
137 | \r | |
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    """Join PathList into a single path, ensure it exists on disk, return it."""
    FullPath = path.join(*PathList)
    CreateDirectory(FullPath)
    return FullPath
143 | \r | |
144 | #\r | |
145 | # Convert string to C format array\r | |
146 | #\r | |
147 | def _ConvertStringToByteArray(Value):\r | |
148 | Value = Value.strip()\r | |
149 | if not Value:\r | |
150 | return None\r | |
151 | if Value[0] == '{':\r | |
152 | if not Value.endswith('}'):\r | |
153 | return None\r | |
154 | Value = Value.replace(' ', '').replace('{', '').replace('}', '')\r | |
155 | ValFields = Value.split(',')\r | |
156 | try:\r | |
157 | for Index in range(len(ValFields)):\r | |
158 | ValFields[Index] = str(int(ValFields[Index], 0))\r | |
159 | except ValueError:\r | |
160 | return None\r | |
161 | Value = '{' + ','.join(ValFields) + '}'\r | |
162 | return Value\r | |
163 | \r | |
164 | Unicode = False\r | |
165 | if Value.startswith('L"'):\r | |
166 | if not Value.endswith('"'):\r | |
167 | return None\r | |
168 | Value = Value[1:]\r | |
169 | Unicode = True\r | |
170 | elif not Value.startswith('"') or not Value.endswith('"'):\r | |
171 | return None\r | |
172 | \r | |
173 | Value = eval(Value) # translate escape character\r | |
174 | NewValue = '{'\r | |
175 | for Index in range(0, len(Value)):\r | |
176 | if Unicode:\r | |
177 | NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','\r | |
178 | else:\r | |
179 | NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','\r | |
180 | Value = NewValue + '0}'\r | |
181 | return Value\r | |
182 | \r | |
183 | ## ModuleAutoGen class\r | |
184 | #\r | |
185 | # This class encapsules the AutoGen behaviors for the build tools. In addition to\r | |
186 | # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according\r | |
187 | # to the [depex] section in module's inf file.\r | |
188 | #\r | |
189 | class ModuleAutoGen(AutoGen):\r | |
    # call super().__init__ then call the worker function with different parameter count
    def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # __new__ may return an existing/cached instance, so guard with the
        # _Init marker to run the worker at most once per object. Extra kwargs
        # are accepted for signature compatibility but intentionally not
        # forwarded to _InitWorker.
        if not hasattr(self, "_Init"):
            self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
            self._Init = True

    ## Cache the timestamps of metafiles of every module in a class attribute
    #
    # Shared across all ModuleAutoGen instances (class-level, not per-instance).
    TimeDict = {}
199 | \r | |
    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        """Create the instance only when the active platform employs the module.

        args[0] is the platform meta-file and args[-1] the data pipe, matching
        _InitWorker's PlatformFile/DataPipe positions. Returns None when the
        module is not used by the active platform, so callers must check the
        result before use.
        """
        # check if this module is employed by active platform
        if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):
            EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
                              % (MetaFile, Arch))
            return None
        return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
207 | \r | |
    ## Initialize ModuleAutoGen
    #
    #   @param      Workspace           EdkIIWorkspaceBuild object
    #   @param      ModuleFile          The path of module file
    #   @param      Target              Build target (DEBUG, RELEASE)
    #   @param      Toolchain           Name of tool chain
    #   @param      Arch                The arch the module supports
    #   @param      PlatformFile        Platform meta-file
    #   @param      DataPipe            Data pipe object carrying platform info ("P_Info")
    #
    def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile,DataPipe):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)

        self.Workspace = Workspace
        self.WorkspaceDir = ""
        self.PlatformInfo = None
        self.DataPipe = DataPipe
        # Populates WorkspaceDir and PlatformInfo from the data pipe; must run
        # before SourceDir is computed below, which depends on WorkspaceDir.
        self.__init_platform_info__()
        self.MetaFile = ModuleFile
        self.SourceDir = self.MetaFile.SubDir
        # Keep the source dir relative to the (possibly multi-root) workspace.
        self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)

        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily

        # Flags recording which generation steps have already been performed.
        self.IsCodeFileCreated = False
        self.IsAsBuiltInfCreated = False
        self.DepexGenerated = False

        self.BuildDatabase = self.Workspace.BuildDatabase
        self.BuildRuleOrder = None
        self.BuildTime = 0

        # Usage comments collected per GUID/protocol/PPI for As-Built INF output.
        self._GuidComments = OrderedListDict()
        self._ProtocolComments = OrderedListDict()
        self._PpiComments = OrderedListDict()
        # Lazily-populated build target / file type caches.
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None

        self.AutoGenDepSet = set()
        self.ReferenceModules = []
        self.ConstPcd = {}
256 | def __init_platform_info__(self):\r | |
257 | pinfo = self.DataPipe.Get("P_Info")\r | |
e8449e1d FB |
258 | self.WorkspaceDir = pinfo.get("WorkspaceDir")\r |
259 | self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)\r | |
    ## hash() operator of ModuleAutoGen
    #
    # The module file path and arch string will be used to represent
    # hash value of this object
    #
    # @retval   int Hash value of the module file path and arch
    #
    # Cached by the project's cached_class_function decorator since the hash
    # is consulted repeatedly during build scheduling.
    @cached_class_function
    def __hash__(self):
        return hash((self.MetaFile, self.Arch))
270 | def __repr__(self):\r | |
271 | return "%s [%s]" % (self.MetaFile, self.Arch)\r | |
272 | \r | |
273 | # Get FixedAtBuild Pcds of this Module\r | |
274 | @cached_property\r | |
275 | def FixedAtBuildPcds(self):\r | |
276 | RetVal = []\r | |
277 | for Pcd in self.ModulePcdList:\r | |
278 | if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:\r | |
279 | continue\r | |
280 | if Pcd not in RetVal:\r | |
281 | RetVal.append(Pcd)\r | |
282 | return RetVal\r | |
283 | \r | |
284 | @cached_property\r | |
285 | def FixedVoidTypePcds(self):\r | |
286 | RetVal = {}\r | |
287 | for Pcd in self.FixedAtBuildPcds:\r | |
288 | if Pcd.DatumType == TAB_VOID:\r | |
289 | if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:\r | |
290 | RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue\r | |
291 | return RetVal\r | |
292 | \r | |
293 | @property\r | |
294 | def UniqueBaseName(self):\r | |
295 | ModuleNames = self.DataPipe.Get("M_Name")\r | |
296 | if not ModuleNames:\r | |
297 | return self.Name\r | |
76e12fa3 | 298 | return ModuleNames.get((self.Name,self.MetaFile),self.Name)\r |
e8449e1d FB |
299 | \r |
    # Macros could be used in build_rule.txt (also Makefile)
    @cached_property
    def Macros(self):
        """Macro table (name -> value) exposed to build rules and makefiles,
        in a stable insertion order."""
        return OrderedDict((
            ("WORKSPACE" ,self.WorkspaceDir),
            ("MODULE_NAME" ,self.Name),
            ("MODULE_NAME_GUID" ,self.UniqueBaseName),
            ("MODULE_GUID" ,self.Guid),
            ("MODULE_VERSION" ,self.Version),
            ("MODULE_TYPE" ,self.ModuleType),
            ("MODULE_FILE" ,str(self.MetaFile)),
            ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),
            ("MODULE_RELATIVE_DIR" ,self.SourceDir),
            ("MODULE_DIR" ,self.SourceDir),
            ("BASE_NAME" ,self.Name),
            ("ARCH" ,self.Arch),
            # TOOLCHAIN / TOOLCHAIN_TAG / TOOL_CHAIN_TAG are aliases of the
            # same value for compatibility with different rule files.
            ("TOOLCHAIN" ,self.ToolChain),
            ("TOOLCHAIN_TAG" ,self.ToolChain),
            ("TOOL_CHAIN_TAG" ,self.ToolChain),
            ("TARGET" ,self.BuildTarget),
            ("BUILD_DIR" ,self.PlatformInfo.BuildDir),
            ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("MODULE_BUILD_DIR" ,self.BuildDir),
            ("OUTPUT_DIR" ,self.OutputDir),
            ("DEBUG_DIR" ,self.DebugDir),
            ("DEST_DIR_OUTPUT" ,self.OutputDir),
            ("DEST_DIR_DEBUG" ,self.DebugDir),
            ("PLATFORM_NAME" ,self.PlatformInfo.Name),
            ("PLATFORM_GUID" ,self.PlatformInfo.Guid),
            ("PLATFORM_VERSION" ,self.PlatformInfo.Version),
            ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),
            ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
            ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),
            ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)
            ))
336 | \r | |
337 | ## Return the module build data object\r | |
338 | @cached_property\r | |
339 | def Module(self):\r | |
340 | return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r | |
341 | \r | |
342 | ## Return the module name\r | |
343 | @cached_property\r | |
344 | def Name(self):\r | |
345 | return self.Module.BaseName\r | |
346 | \r | |
347 | ## Return the module DxsFile if exist\r | |
348 | @cached_property\r | |
349 | def DxsFile(self):\r | |
350 | return self.Module.DxsFile\r | |
351 | \r | |
    ## Return the module meta-file GUID
    @cached_property
    def Guid(self):
        """GUID of the module, honoring a FILE_GUID override encoded in the file name."""
        #
        # To build same module more than once, the module path with FILE_GUID overridden has
        # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
        # in DSC. The overridden GUID can be retrieved from file name
        #
        if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
            #
            # Length of GUID is 36
            #
            return os.path.basename(self.MetaFile.Path)[:36]
        return self.Module.Guid
366 | \r | |
367 | ## Return the module version\r | |
368 | @cached_property\r | |
369 | def Version(self):\r | |
370 | return self.Module.Version\r | |
371 | \r | |
372 | ## Return the module type\r | |
373 | @cached_property\r | |
374 | def ModuleType(self):\r | |
375 | return self.Module.ModuleType\r | |
376 | \r | |
377 | ## Return the component type (for Edk.x style of module)\r | |
378 | @cached_property\r | |
379 | def ComponentType(self):\r | |
380 | return self.Module.ComponentType\r | |
381 | \r | |
382 | ## Return the build type\r | |
383 | @cached_property\r | |
384 | def BuildType(self):\r | |
385 | return self.Module.BuildType\r | |
386 | \r | |
387 | ## Return the PCD_IS_DRIVER setting\r | |
388 | @cached_property\r | |
389 | def PcdIsDriver(self):\r | |
390 | return self.Module.PcdIsDriver\r | |
391 | \r | |
392 | ## Return the autogen version, i.e. module meta-file version\r | |
393 | @cached_property\r | |
394 | def AutoGenVersion(self):\r | |
395 | return self.Module.AutoGenVersion\r | |
396 | \r | |
397 | ## Check if the module is library or not\r | |
398 | @cached_property\r | |
399 | def IsLibrary(self):\r | |
400 | return bool(self.Module.LibraryClass)\r | |
401 | \r | |
402 | ## Check if the module is binary module or not\r | |
403 | @cached_property\r | |
404 | def IsBinaryModule(self):\r | |
405 | return self.Module.IsBinaryModule\r | |
406 | \r | |
407 | ## Return the directory to store intermediate files of the module\r | |
408 | @cached_property\r | |
409 | def BuildDir(self):\r | |
410 | return _MakeDir((\r | |
411 | self.PlatformInfo.BuildDir,\r | |
412 | self.Arch,\r | |
413 | self.SourceDir,\r | |
414 | self.MetaFile.BaseName\r | |
415 | ))\r | |
416 | \r | |
417 | ## Return the directory to store the intermediate object files of the module\r | |
418 | @cached_property\r | |
419 | def OutputDir(self):\r | |
420 | return _MakeDir((self.BuildDir, "OUTPUT"))\r | |
421 | \r | |
422 | ## Return the directory path to store ffs file\r | |
423 | @cached_property\r | |
424 | def FfsOutputDir(self):\r | |
425 | if GlobalData.gFdfParser:\r | |
426 | return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r | |
427 | return ''\r | |
428 | \r | |
429 | ## Return the directory to store auto-gened source files of the module\r | |
430 | @cached_property\r | |
431 | def DebugDir(self):\r | |
432 | return _MakeDir((self.BuildDir, "DEBUG"))\r | |
433 | \r | |
434 | ## Return the path of custom file\r | |
435 | @cached_property\r | |
436 | def CustomMakefile(self):\r | |
437 | RetVal = {}\r | |
438 | for Type in self.Module.CustomMakefile:\r | |
439 | MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'\r | |
440 | File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r | |
441 | RetVal[MakeType] = File\r | |
442 | return RetVal\r | |
443 | \r | |
444 | ## Return the directory of the makefile\r | |
445 | #\r | |
446 | # @retval string The directory string of module's makefile\r | |
447 | #\r | |
448 | @cached_property\r | |
449 | def MakeFileDir(self):\r | |
450 | return self.BuildDir\r | |
451 | \r | |
452 | ## Return build command string\r | |
453 | #\r | |
454 | # @retval string Build command string\r | |
455 | #\r | |
456 | @cached_property\r | |
457 | def BuildCommand(self):\r | |
458 | return self.PlatformInfo.BuildCommand\r | |
459 | \r | |
460 | ## Get object list of all packages the module and its dependent libraries belong to\r | |
461 | #\r | |
462 | # @retval list The list of package object\r | |
463 | #\r | |
464 | @cached_property\r | |
465 | def DerivedPackageList(self):\r | |
466 | PackageList = []\r | |
467 | for M in [self.Module] + self.DependentLibraryList:\r | |
468 | for Package in M.Packages:\r | |
469 | if Package in PackageList:\r | |
470 | continue\r | |
471 | PackageList.append(Package)\r | |
472 | return PackageList\r | |
473 | \r | |
    ## Get the depex string
    #
    # @return : a string contain all depex expression.
    def _GetDepexExpresionString(self):
        """Assemble the [Depex] sections of this module and its dependent
        libraries into one As-Built INF depex string.

        USER_DEFINED/HOST_APPLICATION modules keep separate, section-tagged
        blocks; all other module types are merged into a single parenthesized
        expression joined with AND.
        """
        DepexStr = ''
        DepexList = []
        ## DPX_SOURCE IN Define section.
        if self.Module.DxsFile:
            # Depex comes from the DXS source file instead; nothing to emit here.
            return DepexStr
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            DepexExpressionList = InfObj.GetDepexExpresionList()
            for DepexExpression in DepexExpressionList:
                for key in DepexExpression:
                    Arch, ModuleType = key
                    # Drop comment lines from the raw section content.
                    DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
                    # the type of build module is USER_DEFINED.
                    # All different DEPEX section tags would be copied into the As Built INF file
                    # and there would be separate DEPEX section tags
                    if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
                        if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
                    else:
                        if Arch.upper() == TAB_ARCH_COMMON or \
                           (Arch.upper() == self.Arch.upper() and \
                           ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
                            DepexList.append({(Arch, ModuleType): DepexExpr})

        #the type of build module is USER_DEFINED.
        if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
            for Depex in DepexList:
                for key in Depex:
                    DepexStr += '[Depex.%s.%s]\n' % key
                    DepexStr += '\n'.join('# '+ val for val in Depex[key])
                    DepexStr += '\n\n'
            if not DepexStr:
                return '[Depex.%s]\n' % self.Arch
            return DepexStr

        #the type of build module not is USER_DEFINED.
        Count = 0
        for Depex in DepexList:
            Count += 1
            if DepexStr != '':
                DepexStr += ' AND '
            DepexStr += '('
            for D in Depex.values():
                DepexStr += ' '.join(val for val in D)
            # Strip a trailing END token before closing the parenthesis.
            Index = DepexStr.find('END')
            if Index > -1 and Index == len(DepexStr) - 3:
                DepexStr = DepexStr[:-3]
            DepexStr = DepexStr.strip()
            DepexStr += ')'
        if Count == 1:
            # A single expression needs no wrapping parentheses.
            DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
        if not DepexStr:
            return '[Depex.%s]\n' % self.Arch
        return '[Depex.%s]\n# ' % self.Arch + DepexStr
533 | \r | |
    ## Merge dependency expression
    #
    #   @retval     list    The token list of the dependency expression after parsed
    #
    @cached_property
    def DepexList(self):
        """Merge the parsed [Depex] token lists of the module and its libraries.

        Returns {} for libraries, DXS-driven modules, and modules carrying a
        pre-built dependency-expression file; otherwise {ModuleType: tokens}.
        FixedAtBuild VOID* PCDs referenced by name in depex are replaced by
        their 16-byte values.
        """
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexList = []
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
        #
        # Gather PCD replacements from this module and every library first.
        FixedVoidTypePcds = {}
        for M in [self] + self.LibraryAutoGenList:
            FixedVoidTypePcds.update(M.FixedVoidTypePcds)
        for M in [self] + self.LibraryAutoGenList:
            Inherited = False
            for D in M.Module.Depex[self.Arch, self.ModuleType]:
                if DepexList != []:
                    DepexList.append('AND')
                DepexList.append('(')
                #replace D with value if D is FixedAtBuild PCD
                NewList = []
                for item in D:
                    if '.' not in item:
                        # Plain token (opcode or GUID C name) - keep as is.
                        NewList.append(item)
                    else:
                        try:
                            Value = FixedVoidTypePcds[item]
                            if len(Value.split(',')) != 16:
                                EdkLogger.error("build", FORMAT_INVALID,
                                                "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
                            NewList.append(Value)
                        except:
                            EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))

                DepexList.extend(NewList)
                if DepexList[-1] == 'END':  # no need of a END at this time
                    DepexList.pop()
                DepexList.append(')')
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
            # BEFORE/AFTER expressions stand alone; stop merging further depex.
            if 'BEFORE' in DepexList or 'AFTER' in DepexList:
                break
        if len(DepexList) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexList}
583 | \r | |
    ## Merge dependency expression
    #
    #   @retval     list    The token list of the dependency expression after parsed
    #
    @cached_property
    def DepexExpressionDict(self):
        """Merge the DEPEX expression strings of the module and its libraries.

        Returns {} for libraries, DXS-driven modules, and modules with a
        pre-built dependency-expression file; otherwise
        {ModuleType: merged expression string}.
        """
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexExpressionString = ''
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.DepexExpression[self.Arch, self.ModuleType]:
                if DepexExpressionString != '':
                    DepexExpressionString += ' AND '
                DepexExpressionString += '('
                DepexExpressionString += D
                # NOTE(review): rstrip('END') strips the CHARACTERS E/N/D, not
                # the literal token "END"; an expression ending in another
                # combination of those letters would also be trimmed - confirm
                # this is the intended behavior.
                DepexExpressionString = DepexExpressionString.rstrip('END').strip()
                DepexExpressionString += ')'
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
            # BEFORE/AFTER expressions stand alone; stop merging further depex.
            if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
                break
        if len(DepexExpressionString) > 0:
            EdkLogger.verbose('')

        return {self.ModuleType:DepexExpressionString}
615 | \r | |
    # Get the tiano core user extension, it is contain dependent library.
    # @retval: a list contain tiano core userextension.
    #
    def _GetTianoCoreUserExtensionList(self):
        """Collect [UserExtensions.TianoCore...] sections, with their section
        headers, from this module and its dependent libraries, keeping only
        sections whose arch is COMMON or matches the current arch."""
        TianoCoreUserExtentionList = []
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
            for TianoCoreUserExtent in TianoCoreUserExtenList:
                for Section in TianoCoreUserExtent:
                    # Section tag format: UserExtensions.TianoCore."..."[.Arch]
                    ItemList = Section.split(TAB_SPLIT)
                    Arch = self.Arch
                    if len(ItemList) == 4:
                        Arch = ItemList[3]
                    if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
                        TianoCoreList = []
                        # Re-emit the section header followed by its content.
                        TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
                        TianoCoreList.extend(TianoCoreUserExtent[Section][:])
                        TianoCoreList.append('\n')
                        TianoCoreUserExtentionList.append(TianoCoreList)

        return TianoCoreUserExtentionList
639 | \r | |
640 | ## Return the list of specification version required for the module\r | |
641 | #\r | |
642 | # @retval list The list of specification defined in module file\r | |
643 | #\r | |
644 | @cached_property\r | |
645 | def Specification(self):\r | |
646 | return self.Module.Specification\r | |
647 | \r | |
648 | ## Tool option for the module build\r | |
649 | #\r | |
650 | # @param PlatformInfo The object of PlatformBuildInfo\r | |
651 | # @retval dict The dict containing valid options\r | |
652 | #\r | |
653 | @cached_property\r | |
654 | def BuildOption(self):\r | |
655 | RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)\r | |
656 | if self.BuildRuleOrder:\r | |
657 | self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]\r | |
658 | return RetVal\r | |
659 | \r | |
660 | ## Get include path list from tool option for the module build\r | |
661 | #\r | |
662 | # @retval list The include path list\r | |
663 | #\r | |
664 | @cached_property\r | |
665 | def BuildOptionIncPathList(self):\r | |
666 | #\r | |
667 | # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT\r | |
668 | # is the former use /I , the Latter used -I to specify include directories\r | |
669 | #\r | |
670 | if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):\r | |
671 | BuildOptIncludeRegEx = gBuildOptIncludePatternMsft\r | |
672 | elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):\r | |
673 | BuildOptIncludeRegEx = gBuildOptIncludePatternOther\r | |
674 | else:\r | |
675 | #\r | |
676 | # New ToolChainFamily, don't known whether there is option to specify include directories\r | |
677 | #\r | |
678 | return []\r | |
679 | \r | |
680 | RetVal = []\r | |
681 | for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):\r | |
682 | try:\r | |
683 | FlagOption = self.BuildOption[Tool]['FLAGS']\r | |
684 | except KeyError:\r | |
685 | FlagOption = ''\r | |
686 | \r | |
687 | if self.ToolChainFamily != 'RVCT':\r | |
688 | IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]\r | |
689 | else:\r | |
690 | #\r | |
691 | # RVCT may specify a list of directory seperated by commas\r | |
692 | #\r | |
693 | IncPathList = []\r | |
694 | for Path in BuildOptIncludeRegEx.findall(FlagOption):\r | |
695 | PathList = GetSplitList(Path, TAB_COMMA_SPLIT)\r | |
696 | IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)\r | |
697 | \r | |
698 | #\r | |
699 | # EDK II modules must not reference header files outside of the packages they depend on or\r | |
700 | # within the module's directory tree. Report error if violation.\r | |
701 | #\r | |
702 | if GlobalData.gDisableIncludePathCheck == False:\r | |
703 | for Path in IncPathList:\r | |
704 | if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):\r | |
705 | ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)\r | |
706 | EdkLogger.error("build",\r | |
707 | PARAMETER_INVALID,\r | |
708 | ExtraData=ErrMsg,\r | |
709 | File=str(self.MetaFile))\r | |
710 | RetVal += IncPathList\r | |
711 | return RetVal\r | |
712 | \r | |
    ## Return a list of files which can be built from source
    #
    #  What kind of files can be built is determined by build rules in
    #  $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
    #
    @cached_property
    def SourceFileList(self):
        """Source files of this module that match the active toolchain and family.

        Side effects: prepends each accepted source's directory onto
        IncludePathList and applies the build-rule chain to every returned
        file via _ApplyBuildRule.
        """
        RetVal = []
        ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
        ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
        for F in self.Module.Sources:
            # match tool chain
            if F.TagName not in ToolChainTagSet:
                EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
                continue
            # match tool chain family or build rule family
            if F.ToolChainFamily not in ToolChainFamilySet:
                EdkLogger.debug(
                        EdkLogger.DEBUG_0,
                        "The file [%s] must be built by tools of [%s], " \
                        "but current toolchain family is [%s], buildrule family is [%s]" \
                            % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
                continue

            # add the file path into search path list for file including
            if F.Dir not in self.IncludePathList:
                self.IncludePathList.insert(0, F.Dir)
            RetVal.append(F)

        # Drop duplicate sources that differ only by extension (in place).
        self._MatchBuildRuleOrder(RetVal)

        for F in RetVal:
            self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return RetVal
748 | \r | |
749 | def _MatchBuildRuleOrder(self, FileList):\r | |
750 | Order_Dict = {}\r | |
751 | self.BuildOption\r | |
752 | for SingleFile in FileList:\r | |
753 | if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:\r | |
754 | key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]\r | |
755 | if key in Order_Dict:\r | |
756 | Order_Dict[key].append(SingleFile.Ext)\r | |
757 | else:\r | |
758 | Order_Dict[key] = [SingleFile.Ext]\r | |
759 | \r | |
760 | RemoveList = []\r | |
761 | for F in Order_Dict:\r | |
762 | if len(Order_Dict[F]) > 1:\r | |
763 | Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))\r | |
764 | for Ext in Order_Dict[F][1:]:\r | |
765 | RemoveList.append(F + Ext)\r | |
766 | \r | |
767 | for item in RemoveList:\r | |
768 | FileList.remove(item)\r | |
769 | \r | |
770 | return FileList\r | |
771 | \r | |
772 | ## Return the list of unicode files\r | |
773 | @cached_property\r | |
774 | def UnicodeFileList(self):\r | |
775 | return self.FileTypes.get(TAB_UNICODE_FILE,[])\r | |
776 | \r | |
777 | ## Return the list of vfr files\r | |
778 | @cached_property\r | |
779 | def VfrFileList(self):\r | |
780 | return self.FileTypes.get(TAB_VFR_FILE, [])\r | |
781 | \r | |
782 | ## Return the list of Image Definition files\r | |
783 | @cached_property\r | |
784 | def IdfFileList(self):\r | |
785 | return self.FileTypes.get(TAB_IMAGE_FILE,[])\r | |
786 | \r | |
787 | ## Return a list of files which can be built from binary\r | |
788 | #\r | |
789 | # "Build" binary files are just to copy them to build directory.\r | |
790 | #\r | |
791 | # @retval list The list of files which can be built later\r | |
792 | #\r | |
793 | @cached_property\r | |
794 | def BinaryFileList(self):\r | |
795 | RetVal = []\r | |
796 | for F in self.Module.Binaries:\r | |
797 | if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:\r | |
798 | continue\r | |
799 | RetVal.append(F)\r | |
800 | self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)\r | |
801 | return RetVal\r | |
802 | \r | |
803 | @cached_property\r | |
804 | def BuildRules(self):\r | |
805 | RetVal = {}\r | |
806 | BuildRuleDatabase = self.PlatformInfo.BuildRule\r | |
807 | for Type in BuildRuleDatabase.FileTypeList:\r | |
808 | #first try getting build rule by BuildRuleFamily\r | |
809 | RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]\r | |
810 | if not RuleObject:\r | |
811 | # build type is always module type, but ...\r | |
812 | if self.ModuleType != self.BuildType:\r | |
813 | RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]\r | |
814 | #second try getting build rule by ToolChainFamily\r | |
815 | if not RuleObject:\r | |
816 | RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]\r | |
817 | if not RuleObject:\r | |
818 | # build type is always module type, but ...\r | |
819 | if self.ModuleType != self.BuildType:\r | |
820 | RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]\r | |
821 | if not RuleObject:\r | |
822 | continue\r | |
823 | RuleObject = RuleObject.Instantiate(self.Macros)\r | |
824 | RetVal[Type] = RuleObject\r | |
825 | for Ext in RuleObject.SourceFileExtList:\r | |
826 | RetVal[Ext] = RuleObject\r | |
827 | return RetVal\r | |
828 | \r | |
    def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
        """Chain build rules over File until no rule applies.

        Starting from File (initially classified as FileType), repeatedly
        applies the matching build rule and feeds each rule's outputs back
        in as new sources.  Targets are accumulated into
        self._BuildTargets / self._IntroBuildTargetList /
        self._FinalBuildTargetList, and processed sources are bucketed by
        type in self._FileTypes.
        """
        # Lazily create the shared accumulators on first use.
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        if not BinaryFileList:
            BinaryFileList = self.BinaryFileList

        SubDirectory = os.path.join(self.OutputDir, File.SubDir)
        if not os.path.exists(SubDirectory):
            CreateDirectory(SubDirectory)
        LastTarget = None
        RuleChain = set()
        SourceList = [File]
        Index = 0
        #
        # Make sure to get build rule order value
        #
        # (Evaluating BuildOption populates self.BuildRuleOrder as a side
        # effect.)
        self.BuildOption

        # Worklist loop: SourceList grows as each rule's outputs are appended.
        while Index < len(SourceList):
            Source = SourceList[Index]
            Index = Index + 1

            # Intermediate outputs may live in directories that don't exist yet.
            if Source != File:
                CreateDirectory(Source.Dir)

            if File.IsBinary and File == Source and File in BinaryFileList:
                # Skip all files that are not binary libraries
                if not self.IsLibrary:
                    continue
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            elif FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            else:
                # stop at no more rules
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            FileType = RuleObject.SourceFileType
            self._FileTypes[FileType].add(Source)

            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            Target = RuleObject.Apply(Source, self.BuildRuleOrder)
            if not Target:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break
            elif not Target.Outputs:
                # Only do build for target with outputs
                self._FinalBuildTargetList.add(Target)

            self._BuildTargets[FileType].add(Target)

            # Targets derived directly from the original (non-binary) file are
            # the "intro" targets that seed the makefile dependency chain.
            if not Source.IsBinary and Source == File:
                self._IntroBuildTargetList.add(Target)

            # to avoid cyclic rule
            if FileType in RuleChain:
                break

            RuleChain.add(FileType)
            SourceList.extend(Target.Outputs)
            LastTarget = Target
            # After the first hop, classify subsequent sources by extension
            # rather than by the incoming FileType.
            FileType = TAB_UNKNOWN_FILE
904 | \r | |
905 | @cached_property\r | |
906 | def Targets(self):\r | |
907 | if self._BuildTargets is None:\r | |
908 | self._IntroBuildTargetList = set()\r | |
909 | self._FinalBuildTargetList = set()\r | |
910 | self._BuildTargets = defaultdict(set)\r | |
911 | self._FileTypes = defaultdict(set)\r | |
912 | \r | |
913 | #TRICK: call SourceFileList property to apply build rule for source files\r | |
914 | self.SourceFileList\r | |
915 | \r | |
916 | #TRICK: call _GetBinaryFileList to apply build rule for binary files\r | |
917 | self.BinaryFileList\r | |
918 | \r | |
919 | return self._BuildTargets\r | |
920 | \r | |
921 | @cached_property\r | |
922 | def IntroTargetList(self):\r | |
923 | self.Targets\r | |
924 | return self._IntroBuildTargetList\r | |
925 | \r | |
926 | @cached_property\r | |
927 | def CodaTargetList(self):\r | |
928 | self.Targets\r | |
929 | return self._FinalBuildTargetList\r | |
930 | \r | |
931 | @cached_property\r | |
932 | def FileTypes(self):\r | |
933 | self.Targets\r | |
934 | return self._FileTypes\r | |
935 | \r | |
936 | ## Get the list of package object the module depends on\r | |
937 | #\r | |
938 | # @retval list The package object list\r | |
939 | #\r | |
940 | @cached_property\r | |
941 | def DependentPackageList(self):\r | |
942 | return self.Module.Packages\r | |
943 | \r | |
944 | ## Return the list of auto-generated code file\r | |
945 | #\r | |
946 | # @retval list The list of auto-generated file\r | |
947 | #\r | |
    @cached_property
    def AutoGenFileList(self):
        """Mapping of generated file (PathClass) to its content (str or bytes).

        Runs GenC.CreateCode and collects whatever it produced: AutoGen.c,
        AutoGen.h, the string/image-definition headers and their binary
        packs.  Each generated file is also run through the build rules so
        it is compiled or copied later.
        """
        # UNI/IDF content is generated by AutoGen except for UEFI_HII modules.
        AutoGenUniIdf = self.BuildType != 'UEFI_HII'
        UniStringBinBuffer = BytesIO()
        IdfGenBinBuffer = BytesIO()
        RetVal = {}
        AutoGenC = TemplateString()
        AutoGenH = TemplateString()
        StringH = TemplateString()
        StringIdf = TemplateString()
        # CreateCode fills the buffers/templates above in place.
        GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
        #
        # AutoGen.c is generated if there are library classes in inf, or there are object files
        #
        if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
                                    or TAB_OBJECT_FILE in self.FileTypes):
            AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenC)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(AutoGenH) != "":
            AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(StringH) != "":
            AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        # Binary string pack goes to the output dir, not the debug dir.
        if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = UniStringBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if UniStringBinBuffer is not None:
            UniStringBinBuffer.close()
        if str(StringIdf) != "":
            AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringIdf)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        # Binary image-definition pack, also to the output dir.
        if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if IdfGenBinBuffer is not None:
            IdfGenBinBuffer.close()
        return RetVal
994 | \r | |
995 | ## Return the list of library modules explicitly or implicitly used by this module\r | |
996 | @cached_property\r | |
997 | def DependentLibraryList(self):\r | |
998 | # only merge library classes and PCD for non-library module\r | |
999 | if self.IsLibrary:\r | |
1000 | return []\r | |
1001 | return self.PlatformInfo.ApplyLibraryInstance(self.Module)\r | |
1002 | \r | |
1003 | ## Get the list of PCDs from current module\r | |
1004 | #\r | |
1005 | # @retval list The list of PCD\r | |
1006 | #\r | |
1007 | @cached_property\r | |
1008 | def ModulePcdList(self):\r | |
1009 | # apply PCD settings from platform\r | |
1010 | RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)\r | |
1011 | \r | |
1012 | return RetVal\r | |
1013 | @cached_property\r | |
1014 | def _PcdComments(self):\r | |
1015 | ReVal = OrderedListDict()\r | |
1016 | ExtendCopyDictionaryLists(ReVal, self.Module.PcdComments)\r | |
1017 | if not self.IsLibrary:\r | |
1018 | for Library in self.DependentLibraryList:\r | |
1019 | ExtendCopyDictionaryLists(ReVal, Library.PcdComments)\r | |
1020 | return ReVal\r | |
1021 | \r | |
1022 | ## Get the list of PCDs from dependent libraries\r | |
1023 | #\r | |
1024 | # @retval list The list of PCD\r | |
1025 | #\r | |
1026 | @cached_property\r | |
1027 | def LibraryPcdList(self):\r | |
1028 | if self.IsLibrary:\r | |
1029 | return []\r | |
1030 | RetVal = []\r | |
1031 | Pcds = set()\r | |
1032 | # get PCDs from dependent libraries\r | |
1033 | for Library in self.DependentLibraryList:\r | |
1034 | PcdsInLibrary = OrderedDict()\r | |
1035 | for Key in Library.Pcds:\r | |
1036 | # skip duplicated PCDs\r | |
1037 | if Key in self.Module.Pcds or Key in Pcds:\r | |
1038 | continue\r | |
1039 | Pcds.add(Key)\r | |
1040 | PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])\r | |
1041 | RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))\r | |
1042 | return RetVal\r | |
1043 | \r | |
1044 | ## Get the GUID value mapping\r | |
1045 | #\r | |
1046 | # @retval dict The mapping between GUID cname and its value\r | |
1047 | #\r | |
1048 | @cached_property\r | |
1049 | def GuidList(self):\r | |
1050 | RetVal = self.Module.Guids\r | |
1051 | for Library in self.DependentLibraryList:\r | |
1052 | RetVal.update(Library.Guids)\r | |
1053 | ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)\r | |
1054 | ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)\r | |
1055 | return RetVal\r | |
1056 | \r | |
1057 | @cached_property\r | |
1058 | def GetGuidsUsedByPcd(self):\r | |
1059 | RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())\r | |
1060 | for Library in self.DependentLibraryList:\r | |
1061 | RetVal.update(Library.GetGuidsUsedByPcd())\r | |
1062 | return RetVal\r | |
1063 | ## Get the protocol value mapping\r | |
1064 | #\r | |
1065 | # @retval dict The mapping between protocol cname and its value\r | |
1066 | #\r | |
1067 | @cached_property\r | |
1068 | def ProtocolList(self):\r | |
1069 | RetVal = OrderedDict(self.Module.Protocols)\r | |
1070 | for Library in self.DependentLibraryList:\r | |
1071 | RetVal.update(Library.Protocols)\r | |
1072 | ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)\r | |
1073 | ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)\r | |
1074 | return RetVal\r | |
1075 | \r | |
1076 | ## Get the PPI value mapping\r | |
1077 | #\r | |
1078 | # @retval dict The mapping between PPI cname and its value\r | |
1079 | #\r | |
1080 | @cached_property\r | |
1081 | def PpiList(self):\r | |
1082 | RetVal = OrderedDict(self.Module.Ppis)\r | |
1083 | for Library in self.DependentLibraryList:\r | |
1084 | RetVal.update(Library.Ppis)\r | |
1085 | ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)\r | |
1086 | ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)\r | |
1087 | return RetVal\r | |
1088 | \r | |
1089 | ## Get the list of include search path\r | |
1090 | #\r | |
1091 | # @retval list The list path\r | |
1092 | #\r | |
    @cached_property
    def IncludePathList(self):
        """Include search paths for this module.

        Order: the INF's own directory, the debug directory, then each
        dependent package's directory followed by its [Includes] paths.
        Private package includes are filtered out unless the module lives
        inside that package's directory tree.
        """
        RetVal = []
        RetVal.append(self.MetaFile.Dir)
        RetVal.append(self.DebugDir)

        for Package in self.Module.Packages:
            PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
            if PackageDir not in RetVal:
                RetVal.append(PackageDir)
            IncludesList = Package.Includes
            if Package._PrivateIncludes:
                # Modules outside the package tree may not see its private
                # include paths.
                if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
                    IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
            for Inc in IncludesList:
                # NOTE(review): membership is tested on Inc but str(Inc) is
                # appended -- this relies on the path objects comparing equal
                # to their string form; confirm against PathClass.
                if Inc not in RetVal:
                    RetVal.append(str(Inc))
        return RetVal
1111 | \r | |
1112 | @cached_property\r | |
1113 | def IncludePathLength(self):\r | |
1114 | return sum(len(inc)+1 for inc in self.IncludePathList)\r | |
1115 | \r | |
82407bd1 RC |
1116 | ## Get the list of include paths from the packages\r |
1117 | #\r | |
1118 | # @IncludesList list The list path\r | |
1119 | #\r | |
1120 | @cached_property\r | |
1121 | def PackageIncludePathList(self):\r | |
1122 | IncludesList = []\r | |
1123 | for Package in self.Module.Packages:\r | |
1124 | PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r | |
1125 | IncludesList = Package.Includes\r | |
1126 | if Package._PrivateIncludes:\r | |
1127 | if not self.MetaFile.Path.startswith(PackageDir):\r | |
1128 | IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))\r | |
1129 | return IncludesList\r | |
1130 | \r | |
e8449e1d FB |
1131 | ## Get HII EX PCDs which maybe used by VFR\r |
1132 | #\r | |
1133 | # efivarstore used by VFR may relate with HII EX PCDs\r | |
1134 | # Get the variable name and GUID from efivarstore and HII EX PCD\r | |
1135 | # List the HII EX PCDs in As Built INF if both name and GUID match.\r | |
1136 | #\r | |
1137 | # @retval list HII EX PCDs\r | |
1138 | #\r | |
1139 | def _GetPcdsMaybeUsedByVfr(self):\r | |
1140 | if not self.SourceFileList:\r | |
1141 | return []\r | |
1142 | \r | |
1143 | NameGuids = set()\r | |
1144 | for SrcFile in self.SourceFileList:\r | |
1145 | if SrcFile.Ext.lower() != '.vfr':\r | |
1146 | continue\r | |
1147 | Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')\r | |
1148 | if not os.path.exists(Vfri):\r | |
1149 | continue\r | |
1150 | VfriFile = open(Vfri, 'r')\r | |
1151 | Content = VfriFile.read()\r | |
1152 | VfriFile.close()\r | |
1153 | Pos = Content.find('efivarstore')\r | |
1154 | while Pos != -1:\r | |
1155 | #\r | |
1156 | # Make sure 'efivarstore' is the start of efivarstore statement\r | |
1157 | # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'\r | |
1158 | #\r | |
1159 | Index = Pos - 1\r | |
1160 | while Index >= 0 and Content[Index] in ' \t\r\n':\r | |
1161 | Index -= 1\r | |
1162 | if Index >= 0 and Content[Index] != ';':\r | |
1163 | Pos = Content.find('efivarstore', Pos + len('efivarstore'))\r | |
1164 | continue\r | |
1165 | #\r | |
1166 | # 'efivarstore' must be followed by name and guid\r | |
1167 | #\r | |
1168 | Name = gEfiVarStoreNamePattern.search(Content, Pos)\r | |
1169 | if not Name:\r | |
1170 | break\r | |
1171 | Guid = gEfiVarStoreGuidPattern.search(Content, Pos)\r | |
1172 | if not Guid:\r | |
1173 | break\r | |
1174 | NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')\r | |
1175 | NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))\r | |
1176 | Pos = Content.find('efivarstore', Name.end())\r | |
1177 | if not NameGuids:\r | |
1178 | return []\r | |
1179 | HiiExPcds = []\r | |
1180 | for Pcd in self.PlatformInfo.Pcds.values():\r | |
1181 | if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:\r | |
1182 | continue\r | |
1183 | for SkuInfo in Pcd.SkuInfoList.values():\r | |
1184 | Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)\r | |
1185 | if not Value:\r | |
1186 | continue\r | |
1187 | Name = _ConvertStringToByteArray(SkuInfo.VariableName)\r | |
1188 | Guid = GuidStructureStringToGuidString(Value)\r | |
1189 | if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:\r | |
1190 | HiiExPcds.append(Pcd)\r | |
1191 | break\r | |
1192 | \r | |
1193 | return HiiExPcds\r | |
1194 | \r | |
    def _GenOffsetBin(self):
        """Generate <Name>Offset.bin recording the offsets of the VFR and UNI
        binaries inside the final .efi image.

        Returns the generated file's name, or None when the module has no
        VFR/UNI sources or no offsets could be extracted from the map file.
        """
        VfrUniBaseName = {}
        for SourceFile in self.Module.Sources:
            if SourceFile.Type.upper() == ".VFR" :
                #
                # search the .map file to find the offset of vfr binary in the PE32+/TE file.
                #
                VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
            elif SourceFile.Type.upper() == ".UNI" :
                #
                # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
                #
                VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")

        if not VfrUniBaseName:
            return None
        MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
        EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
        VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
        if not VfrUniOffsetList:
            return None

        OutputName = '%sOffset.bin' % self.Name
        UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)

        try:
            # Unbuffered (buffering=0) binary handle for the offset file.
            fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
        except:
            # EdkLogger.error raises, so execution does not continue here.
            EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)

        # Use a instance of BytesIO to cache data
        fStringIO = BytesIO()

        # Each record is a 16-byte GUID followed by a 64-bit offset.
        for Item in VfrUniOffsetList:
            if (Item[0].find("Strings") != -1):
                #
                # UNI offset in image.
                # GUID + Offset
                # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
                #
                UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
                fStringIO.write(UniGuid)
                UniValue = pack ('Q', int (Item[1], 16))
                fStringIO.write (UniValue)
            else:
                #
                # VFR binary offset in image.
                # GUID + Offset
                # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
                #
                VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
                fStringIO.write(VfrGuid)
                VfrValue = pack ('Q', int (Item[1], 16))
                fStringIO.write (VfrValue)
        #
        # write data into file.
        #
        try :
            fInputfile.write (fStringIO.getvalue())
        except:
            EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
                            "file been locked or using by other applications." %UniVfrOffsetFileName, None)

        fStringIO.close ()
        fInputfile.close ()
        return OutputName
1261 | @cached_property\r | |
1262 | def OutputFile(self):\r | |
1263 | retVal = set()\r | |
1264 | OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r | |
1265 | DebugDir = self.DebugDir.replace('\\', '/').strip('/')\r | |
1266 | for Item in self.CodaTargetList:\r | |
1267 | File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')\r | |
1268 | retVal.add(File)\r | |
1269 | if self.DepexGenerated:\r | |
1270 | retVal.add(self.Name + '.depex')\r | |
1271 | \r | |
1272 | Bin = self._GenOffsetBin()\r | |
1273 | if Bin:\r | |
1274 | retVal.add(Bin)\r | |
1275 | \r | |
1276 | for Root, Dirs, Files in os.walk(OutputDir):\r | |
1277 | for File in Files:\r | |
1278 | if File.lower().endswith('.pdb'):\r | |
1279 | retVal.add(File)\r | |
1280 | \r | |
1281 | return retVal\r | |
1282 | \r | |
1283 | ## Create AsBuilt INF file the module\r | |
1284 | #\r | |
1285 | def CreateAsBuiltInf(self):\r | |
1286 | \r | |
1287 | if self.IsAsBuiltInfCreated:\r | |
1288 | return\r | |
1289 | \r | |
1290 | # Skip INF file generation for libraries\r | |
1291 | if self.IsLibrary:\r | |
1292 | return\r | |
1293 | \r | |
1294 | # Skip the following code for modules with no source files\r | |
1295 | if not self.SourceFileList:\r | |
1296 | return\r | |
1297 | \r | |
1298 | # Skip the following code for modules without any binary files\r | |
1299 | if self.BinaryFileList:\r | |
1300 | return\r | |
1301 | \r | |
1302 | ### TODO: How to handles mixed source and binary modules\r | |
1303 | \r | |
1304 | # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries\r | |
1305 | # Also find all packages that the DynamicEx PCDs depend on\r | |
1306 | Pcds = []\r | |
1307 | PatchablePcds = []\r | |
1308 | Packages = []\r | |
1309 | PcdCheckList = []\r | |
1310 | PcdTokenSpaceList = []\r | |
1311 | for Pcd in self.ModulePcdList + self.LibraryPcdList:\r | |
1312 | if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:\r | |
1313 | PatchablePcds.append(Pcd)\r | |
1314 | PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))\r | |
1315 | elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1316 | if Pcd not in Pcds:\r | |
1317 | Pcds.append(Pcd)\r | |
1318 | PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))\r | |
1319 | PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))\r | |
1320 | PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)\r | |
1321 | GuidList = OrderedDict(self.GuidList)\r | |
1322 | for TokenSpace in self.GetGuidsUsedByPcd:\r | |
1323 | # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list\r | |
1324 | # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs\r | |
1325 | if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:\r | |
1326 | GuidList.pop(TokenSpace)\r | |
1327 | CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)\r | |
1328 | for Package in self.DerivedPackageList:\r | |
1329 | if Package in Packages:\r | |
1330 | continue\r | |
1331 | BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)\r | |
1332 | Found = False\r | |
1333 | for Index in range(len(BeChecked)):\r | |
1334 | for Item in CheckList[Index]:\r | |
1335 | if Item in BeChecked[Index]:\r | |
1336 | Packages.append(Package)\r | |
1337 | Found = True\r | |
1338 | break\r | |
1339 | if Found:\r | |
1340 | break\r | |
1341 | \r | |
1342 | VfrPcds = self._GetPcdsMaybeUsedByVfr()\r | |
1343 | for Pkg in self.PlatformInfo.PackageList:\r | |
1344 | if Pkg in Packages:\r | |
1345 | continue\r | |
1346 | for VfrPcd in VfrPcds:\r | |
1347 | if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or\r | |
1348 | (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):\r | |
1349 | Packages.append(Pkg)\r | |
1350 | break\r | |
1351 | \r | |
1352 | ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType\r | |
1353 | DriverType = self.PcdIsDriver if self.PcdIsDriver else ''\r | |
1354 | Guid = self.Guid\r | |
1355 | MDefs = self.Module.Defines\r | |
1356 | \r | |
1357 | AsBuiltInfDict = {\r | |
1358 | 'module_name' : self.Name,\r | |
1359 | 'module_guid' : Guid,\r | |
1360 | 'module_module_type' : ModuleType,\r | |
1361 | 'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],\r | |
1362 | 'pcd_is_driver_string' : [],\r | |
1363 | 'module_uefi_specification_version' : [],\r | |
1364 | 'module_pi_specification_version' : [],\r | |
1365 | 'module_entry_point' : self.Module.ModuleEntryPointList,\r | |
1366 | 'module_unload_image' : self.Module.ModuleUnloadImageList,\r | |
1367 | 'module_constructor' : self.Module.ConstructorList,\r | |
1368 | 'module_destructor' : self.Module.DestructorList,\r | |
1369 | 'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],\r | |
1370 | 'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],\r | |
1371 | 'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],\r | |
1372 | 'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],\r | |
1373 | 'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],\r | |
1374 | 'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],\r | |
1375 | 'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],\r | |
1376 | 'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],\r | |
1377 | 'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],\r | |
1378 | 'module_arch' : self.Arch,\r | |
1379 | 'package_item' : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],\r | |
1380 | 'binary_item' : [],\r | |
1381 | 'patchablepcd_item' : [],\r | |
1382 | 'pcd_item' : [],\r | |
1383 | 'protocol_item' : [],\r | |
1384 | 'ppi_item' : [],\r | |
1385 | 'guid_item' : [],\r | |
1386 | 'flags_item' : [],\r | |
1387 | 'libraryclasses_item' : []\r | |
1388 | }\r | |
1389 | \r | |
1390 | if 'MODULE_UNI_FILE' in MDefs:\r | |
1391 | UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])\r | |
1392 | if os.path.isfile(UNIFile):\r | |
1393 | shutil.copy2(UNIFile, self.OutputDir)\r | |
1394 | \r | |
1395 | if self.AutoGenVersion > int(gInfSpecVersion, 0):\r | |
1396 | AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion\r | |
1397 | else:\r | |
1398 | AsBuiltInfDict['module_inf_version'] = gInfSpecVersion\r | |
1399 | \r | |
1400 | if DriverType:\r | |
1401 | AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)\r | |
1402 | \r | |
1403 | if 'UEFI_SPECIFICATION_VERSION' in self.Specification:\r | |
1404 | AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])\r | |
1405 | if 'PI_SPECIFICATION_VERSION' in self.Specification:\r | |
1406 | AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])\r | |
1407 | \r | |
1408 | OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r | |
1409 | DebugDir = self.DebugDir.replace('\\', '/').strip('/')\r | |
1410 | for Item in self.CodaTargetList:\r | |
1411 | File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')\r | |
1412 | if os.path.isabs(File):\r | |
1413 | File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')\r | |
1414 | if Item.Target.Ext.lower() == '.aml':\r | |
1415 | AsBuiltInfDict['binary_item'].append('ASL|' + File)\r | |
1416 | elif Item.Target.Ext.lower() == '.acpi':\r | |
1417 | AsBuiltInfDict['binary_item'].append('ACPI|' + File)\r | |
1418 | elif Item.Target.Ext.lower() == '.efi':\r | |
1419 | AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')\r | |
1420 | else:\r | |
1421 | AsBuiltInfDict['binary_item'].append('BIN|' + File)\r | |
1422 | if not self.DepexGenerated:\r | |
1423 | DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')\r | |
1424 | if os.path.exists(DepexFile):\r | |
1425 | self.DepexGenerated = True\r | |
1426 | if self.DepexGenerated:\r | |
1427 | if self.ModuleType in [SUP_MODULE_PEIM]:\r | |
1428 | AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')\r | |
1429 | elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:\r | |
1430 | AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')\r | |
1431 | elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:\r | |
1432 | AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')\r | |
1433 | \r | |
1434 | Bin = self._GenOffsetBin()\r | |
1435 | if Bin:\r | |
1436 | AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)\r | |
1437 | \r | |
1438 | for Root, Dirs, Files in os.walk(OutputDir):\r | |
1439 | for File in Files:\r | |
1440 | if File.lower().endswith('.pdb'):\r | |
1441 | AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)\r | |
1442 | HeaderComments = self.Module.HeaderComments\r | |
1443 | StartPos = 0\r | |
1444 | for Index in range(len(HeaderComments)):\r | |
1445 | if HeaderComments[Index].find('@BinaryHeader') != -1:\r | |
1446 | HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')\r | |
1447 | StartPos = Index\r | |
1448 | break\r | |
1449 | AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')\r | |
1450 | AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)\r | |
1451 | \r | |
1452 | GenList = [\r | |
1453 | (self.ProtocolList, self._ProtocolComments, 'protocol_item'),\r | |
1454 | (self.PpiList, self._PpiComments, 'ppi_item'),\r | |
1455 | (GuidList, self._GuidComments, 'guid_item')\r | |
1456 | ]\r | |
1457 | for Item in GenList:\r | |
1458 | for CName in Item[0]:\r | |
1459 | Comments = '\n '.join(Item[1][CName]) if CName in Item[1] else ''\r | |
1460 | Entry = Comments + '\n ' + CName if Comments else CName\r | |
1461 | AsBuiltInfDict[Item[2]].append(Entry)\r | |
1462 | PatchList = parsePcdInfoFromMapFile(\r | |
1463 | os.path.join(self.OutputDir, self.Name + '.map'),\r | |
1464 | os.path.join(self.OutputDir, self.Name + '.efi')\r | |
1465 | )\r | |
1466 | if PatchList:\r | |
1467 | for Pcd in PatchablePcds:\r | |
1468 | TokenCName = Pcd.TokenCName\r | |
1469 | for PcdItem in GlobalData.MixedPcd:\r | |
1470 | if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:\r | |
1471 | TokenCName = PcdItem[0]\r | |
1472 | break\r | |
1473 | for PatchPcd in PatchList:\r | |
1474 | if TokenCName == PatchPcd[0]:\r | |
1475 | break\r | |
1476 | else:\r | |
1477 | continue\r | |
1478 | PcdValue = ''\r | |
1479 | if Pcd.DatumType == 'BOOLEAN':\r | |
1480 | BoolValue = Pcd.DefaultValue.upper()\r | |
1481 | if BoolValue == 'TRUE':\r | |
1482 | Pcd.DefaultValue = '1'\r | |
1483 | elif BoolValue == 'FALSE':\r | |
1484 | Pcd.DefaultValue = '0'\r | |
1485 | \r | |
1486 | if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:\r | |
1487 | HexFormat = '0x%02x'\r | |
1488 | if Pcd.DatumType == TAB_UINT16:\r | |
1489 | HexFormat = '0x%04x'\r | |
1490 | elif Pcd.DatumType == TAB_UINT32:\r | |
1491 | HexFormat = '0x%08x'\r | |
1492 | elif Pcd.DatumType == TAB_UINT64:\r | |
1493 | HexFormat = '0x%016x'\r | |
1494 | PcdValue = HexFormat % int(Pcd.DefaultValue, 0)\r | |
1495 | else:\r | |
1496 | if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':\r | |
1497 | EdkLogger.error("build", AUTOGEN_ERROR,\r | |
1498 | "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)\r | |
1499 | )\r | |
1500 | ArraySize = int(Pcd.MaxDatumSize, 0)\r | |
1501 | PcdValue = Pcd.DefaultValue\r | |
1502 | if PcdValue[0] != '{':\r | |
1503 | Unicode = False\r | |
1504 | if PcdValue[0] == 'L':\r | |
1505 | Unicode = True\r | |
1506 | PcdValue = PcdValue.lstrip('L')\r | |
1507 | PcdValue = eval(PcdValue)\r | |
1508 | NewValue = '{'\r | |
1509 | for Index in range(0, len(PcdValue)):\r | |
1510 | if Unicode:\r | |
1511 | CharVal = ord(PcdValue[Index])\r | |
1512 | NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \\r | |
1513 | + '0x%02x' % (CharVal >> 8) + ', '\r | |
1514 | else:\r | |
1515 | NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '\r | |
1516 | Padding = '0x00, '\r | |
1517 | if Unicode:\r | |
1518 | Padding = Padding * 2\r | |
1519 | ArraySize = ArraySize // 2\r | |
1520 | if ArraySize < (len(PcdValue) + 1):\r | |
1521 | if Pcd.MaxSizeUserSet:\r | |
1522 | EdkLogger.error("build", AUTOGEN_ERROR,\r | |
1523 | "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)\r | |
1524 | )\r | |
1525 | else:\r | |
1526 | ArraySize = len(PcdValue) + 1\r | |
1527 | if ArraySize > len(PcdValue) + 1:\r | |
1528 | NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)\r | |
1529 | PcdValue = NewValue + Padding.strip().rstrip(',') + '}'\r | |
1530 | elif len(PcdValue.split(',')) <= ArraySize:\r | |
1531 | PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))\r | |
1532 | PcdValue += '}'\r | |
1533 | else:\r | |
1534 | if Pcd.MaxSizeUserSet:\r | |
1535 | EdkLogger.error("build", AUTOGEN_ERROR,\r | |
1536 | "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)\r | |
1537 | )\r | |
1538 | else:\r | |
1539 | ArraySize = len(PcdValue) + 1\r | |
1540 | PcdItem = '%s.%s|%s|0x%X' % \\r | |
1541 | (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])\r | |
1542 | PcdComments = ''\r | |
1543 | if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:\r | |
1544 | PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])\r | |
1545 | if PcdComments:\r | |
1546 | PcdItem = PcdComments + '\n ' + PcdItem\r | |
1547 | AsBuiltInfDict['patchablepcd_item'].append(PcdItem)\r | |
1548 | \r | |
1549 | for Pcd in Pcds + VfrPcds:\r | |
1550 | PcdCommentList = []\r | |
1551 | HiiInfo = ''\r | |
1552 | TokenCName = Pcd.TokenCName\r | |
1553 | for PcdItem in GlobalData.MixedPcd:\r | |
1554 | if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:\r | |
1555 | TokenCName = PcdItem[0]\r | |
1556 | break\r | |
1557 | if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:\r | |
1558 | for SkuName in Pcd.SkuInfoList:\r | |
1559 | SkuInfo = Pcd.SkuInfoList[SkuName]\r | |
1560 | HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)\r | |
1561 | break\r | |
1562 | if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:\r | |
1563 | PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]\r | |
1564 | if HiiInfo:\r | |
1565 | UsageIndex = -1\r | |
1566 | UsageStr = ''\r | |
1567 | for Index, Comment in enumerate(PcdCommentList):\r | |
1568 | for Usage in UsageList:\r | |
1569 | if Comment.find(Usage) != -1:\r | |
1570 | UsageStr = Usage\r | |
1571 | UsageIndex = Index\r | |
1572 | break\r | |
1573 | if UsageIndex != -1:\r | |
1574 | PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))\r | |
1575 | else:\r | |
1576 | PcdCommentList.append('## UNDEFINED ' + HiiInfo)\r | |
1577 | PcdComments = '\n '.join(PcdCommentList)\r | |
1578 | PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName\r | |
1579 | if PcdComments:\r | |
1580 | PcdEntry = PcdComments + '\n ' + PcdEntry\r | |
1581 | AsBuiltInfDict['pcd_item'].append(PcdEntry)\r | |
1582 | for Item in self.BuildOption:\r | |
1583 | if 'FLAGS' in self.BuildOption[Item]:\r | |
1584 | AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))\r | |
1585 | \r | |
1586 | # Generated LibraryClasses section in comments.\r | |
1587 | for Library in self.LibraryAutoGenList:\r | |
1588 | AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))\r | |
1589 | \r | |
1590 | # Generated UserExtensions TianoCore section.\r | |
1591 | # All tianocore user extensions are copied.\r | |
1592 | UserExtStr = ''\r | |
1593 | for TianoCore in self._GetTianoCoreUserExtensionList():\r | |
1594 | UserExtStr += '\n'.join(TianoCore)\r | |
1595 | ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])\r | |
1596 | if os.path.isfile(ExtensionFile):\r | |
1597 | shutil.copy2(ExtensionFile, self.OutputDir)\r | |
1598 | AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr\r | |
1599 | \r | |
1600 | # Generated depex expression section in comments.\r | |
1601 | DepexExpression = self._GetDepexExpresionString()\r | |
1602 | AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''\r | |
1603 | \r | |
1604 | AsBuiltInf = TemplateString()\r | |
1605 | AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))\r | |
1606 | \r | |
1607 | SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)\r | |
1608 | \r | |
1609 | self.IsAsBuiltInfCreated = True\r | |
1610 | \r | |
    def CopyModuleToCache(self):
        """Publish this module's build results into the binary cache destination.

        Copies the module hash file, the as-built INF and every recorded binary
        output into GlobalData.gBinCacheDest so a later build can restore them
        via AttemptModuleCacheCopy() instead of rebuilding.
        """
        # Cache layout: <dest>/<platform>/<target>_<toolchain>/<arch>/<source dir>/<module base name>
        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.Name, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
        CreateDirectory (FileDir)
        # Copy the module hash file when one was generated for this build.
        HashFile = path.join(self.BuildDir, self.Name + '.hash')
        if os.path.exists(HashFile):
            CopyFileOnChange(HashFile, FileDir)
        # Copy the as-built INF describing the produced binaries.
        ModuleFile = path.join(self.OutputDir, self.Name + '.inf')
        if os.path.exists(ModuleFile):
            CopyFileOnChange(ModuleFile, FileDir)
        # Fall back to the binaries recorded in the build database when the
        # output file list has not been populated for this object yet.
        if not self.OutputFile:
            Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
            self.OutputFile = Ma.Binaries
        for File in self.OutputFile:
            File = str(File)
            # Relative entries are resolved against the module output directory.
            if not os.path.isabs(File):
                File = os.path.join(self.OutputDir, File)
            if os.path.exists(File):
                # Preserve the sub-directory layout relative to OutputDir
                # inside the cache directory.
                sub_dir = os.path.relpath(File, self.OutputDir)
                destination_file = os.path.join(FileDir, sub_dir)
                destination_dir = os.path.dirname(destination_file)
                CreateDirectory(destination_dir)
                CopyFileOnChange(File, destination_dir)
1633 | \r | |
1634 | def AttemptModuleCacheCopy(self):\r | |
1635 | # If library or Module is binary do not skip by hash\r | |
1636 | if self.IsBinaryModule:\r | |
1637 | return False\r | |
1638 | # .inc is contains binary information so do not skip by hash as well\r | |
1639 | for f_ext in self.SourceFileList:\r | |
1640 | if '.inc' in str(f_ext):\r | |
1641 | return False\r | |
1642 | FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.Name, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r | |
1643 | HashFile = path.join(FileDir, self.Name + '.hash')\r | |
1644 | if os.path.exists(HashFile):\r | |
1645 | f = open(HashFile, 'r')\r | |
1646 | CacheHash = f.read()\r | |
1647 | f.close()\r | |
1648 | self.GenModuleHash()\r | |
1649 | if GlobalData.gModuleHash[self.Arch][self.Name]:\r | |
1650 | if CacheHash == GlobalData.gModuleHash[self.Arch][self.Name]:\r | |
1651 | for root, dir, files in os.walk(FileDir):\r | |
1652 | for f in files:\r | |
1653 | if self.Name + '.hash' in f:\r | |
1654 | CopyFileOnChange(HashFile, self.BuildDir)\r | |
1655 | else:\r | |
1656 | File = path.join(root, f)\r | |
1657 | sub_dir = os.path.relpath(File, FileDir)\r | |
1658 | destination_file = os.path.join(self.OutputDir, sub_dir)\r | |
1659 | destination_dir = os.path.dirname(destination_file)\r | |
1660 | CreateDirectory(destination_dir)\r | |
1661 | CopyFileOnChange(File, destination_dir)\r | |
1662 | if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r | |
1663 | CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r | |
1664 | return True\r | |
1665 | return False\r | |
1666 | \r | |
1667 | ## Create makefile for the module and its dependent libraries\r | |
1668 | #\r | |
1669 | # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of\r | |
1670 | # dependent libraries will be created\r | |
1671 | #\r | |
1672 | @cached_class_function\r | |
1673 | def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r | |
1674 | # nest this function inside it's only caller.\r | |
1675 | def CreateTimeStamp():\r | |
1676 | FileSet = {self.MetaFile.Path}\r | |
1677 | \r | |
1678 | for SourceFile in self.Module.Sources:\r | |
1679 | FileSet.add (SourceFile.Path)\r | |
1680 | \r | |
1681 | for Lib in self.DependentLibraryList:\r | |
1682 | FileSet.add (Lib.MetaFile.Path)\r | |
1683 | \r | |
1684 | for f in self.AutoGenDepSet:\r | |
1685 | FileSet.add (f.Path)\r | |
1686 | \r | |
1687 | if os.path.exists (self.TimeStampPath):\r | |
1688 | os.remove (self.TimeStampPath)\r | |
1689 | with open(self.TimeStampPath, 'w+') as fd:\r | |
1690 | for f in FileSet:\r | |
1691 | fd.write(f)\r | |
1692 | fd.write("\n")\r | |
1693 | \r | |
1694 | # Ignore generating makefile when it is a binary module\r | |
1695 | if self.IsBinaryModule:\r | |
1696 | return\r | |
1697 | \r | |
1698 | self.GenFfsList = GenFfsList\r | |
1699 | \r | |
1700 | if not self.IsLibrary and CreateLibraryMakeFile:\r | |
1701 | for LibraryAutoGen in self.LibraryAutoGenList:\r | |
1702 | LibraryAutoGen.CreateMakeFile()\r | |
673d09a2 | 1703 | \r |
e8449e1d FB |
1704 | # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping\r |
1705 | if not GlobalData.gUseHashCache and self.CanSkip():\r | |
1706 | return\r | |
1707 | \r | |
1708 | if len(self.CustomMakefile) == 0:\r | |
1709 | Makefile = GenMake.ModuleMakefile(self)\r | |
1710 | else:\r | |
1711 | Makefile = GenMake.CustomMakefile(self)\r | |
1712 | if Makefile.Generate():\r | |
1713 | EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %\r | |
1714 | (self.Name, self.Arch))\r | |
1715 | else:\r | |
1716 | EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %\r | |
1717 | (self.Name, self.Arch))\r | |
1718 | \r | |
1719 | CreateTimeStamp()\r | |
1720 | \r | |
1721 | def CopyBinaryFiles(self):\r | |
1722 | for File in self.Module.Binaries:\r | |
1723 | SrcPath = File.Path\r | |
1724 | DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))\r | |
1725 | CopyLongFilePath(SrcPath, DstPath)\r | |
    ## Create autogen code for the module and its dependent libraries
    #
    #   @param      CreateLibraryCodeFile   Flag indicating if or not the code of
    #                                       dependent libraries will be created
    #
    def CreateCodeFile(self, CreateLibraryCodeFile=True):
        # Only generate the AutoGen files once per module object.
        if self.IsCodeFileCreated:
            return

        # Need to generate PcdDatabase even PcdDriver is binarymodule
        if self.IsBinaryModule and self.PcdIsDriver != '':
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
            return
        if self.IsBinaryModule:
            # Binary library: just copy the pre-built binaries to the output dir.
            if self.IsLibrary:
                self.CopyBinaryFiles()
            return

        # Generate code for dependent libraries first.
        if not self.IsLibrary and CreateLibraryCodeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateCodeFile()

        # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping
        if not GlobalData.gUseHashCache and self.CanSkip():
            return

        # Track which AutoGen files were (re)written vs. left unchanged.
        AutoGenList = []
        IgoredAutoGenList = []

        for File in self.AutoGenFileList:
            # GenC.Generate returns True when the file was actually written.
            if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
                AutoGenList.append(str(File))
            else:
                IgoredAutoGenList.append(str(File))


        for ModuleType in self.DepexList:
            # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
            if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
                continue

            Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
            DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}

            # A non-empty postfix expression means a .depex binary will exist.
            if len(Dpx.PostfixNotation) != 0:
                self.DepexGenerated = True

            if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
                AutoGenList.append(str(DpxFile))
            else:
                IgoredAutoGenList.append(str(DpxFile))

        # Report generated vs. skipped (unchanged) files at debug level.
        if IgoredAutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
                            (" ".join(AutoGenList), self.Name, self.Arch))
        elif AutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
                            (" ".join(IgoredAutoGenList), self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
                            (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))

        self.IsCodeFileCreated = True
        return AutoGenList
1789 | \r | |
    ## Summarize the ModuleAutoGen objects of all libraries used by this module
    @cached_property
    def LibraryAutoGenList(self):
        RetVal = []
        for Library in self.DependentLibraryList:
            # Construct the library's ModuleAutoGen with the same
            # target/toolchain/arch context as this module.
            La = ModuleAutoGen(
                        self.Workspace,
                        Library.MetaFile,
                        self.BuildTarget,
                        self.ToolChain,
                        self.Arch,
                        self.PlatformInfo.MetaFile,
                        self.DataPipe
                        )
            La.IsLibrary = True
            if La not in RetVal:
                RetVal.append(La)
                # Apply build rules to the library's final build targets so
                # this module's makefile can consume what the library produces.
                for Lib in La.CodaTargetList:
                    self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
        return RetVal
1810 | \r | |
1811 | def GenModuleHash(self):\r | |
1812 | # Initialize a dictionary for each arch type\r | |
1813 | if self.Arch not in GlobalData.gModuleHash:\r | |
1814 | GlobalData.gModuleHash[self.Arch] = {}\r | |
1815 | \r | |
1816 | # Early exit if module or library has been hashed and is in memory\r | |
1817 | if self.Name in GlobalData.gModuleHash[self.Arch]:\r | |
1818 | return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r | |
1819 | \r | |
1820 | # Initialze hash object\r | |
1821 | m = hashlib.md5()\r | |
1822 | \r | |
1823 | # Add Platform level hash\r | |
1824 | m.update(GlobalData.gPlatformHash.encode('utf-8'))\r | |
1825 | \r | |
1826 | # Add Package level hash\r | |
1827 | if self.DependentPackageList:\r | |
1828 | for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r | |
1829 | if Pkg.PackageName in GlobalData.gPackageHash:\r | |
1830 | m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))\r | |
1831 | \r | |
1832 | # Add Library hash\r | |
1833 | if self.LibraryAutoGenList:\r | |
1834 | for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r | |
1835 | if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r | |
1836 | Lib.GenModuleHash()\r | |
1837 | m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r | |
1838 | \r | |
1839 | # Add Module self\r | |
1840 | f = open(str(self.MetaFile), 'rb')\r | |
1841 | Content = f.read()\r | |
1842 | f.close()\r | |
1843 | m.update(Content)\r | |
1844 | \r | |
1845 | # Add Module's source files\r | |
1846 | if self.SourceFileList:\r | |
1847 | for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r | |
1848 | f = open(str(File), 'rb')\r | |
1849 | Content = f.read()\r | |
1850 | f.close()\r | |
1851 | m.update(Content)\r | |
1852 | \r | |
1853 | GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()\r | |
1854 | \r | |
1855 | return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r | |
1856 | \r | |
1857 | ## Decide whether we can skip the ModuleAutoGen process\r | |
1858 | def CanSkipbyHash(self):\r | |
1859 | # Hashing feature is off\r | |
1860 | if not GlobalData.gUseHashCache:\r | |
1861 | return False\r | |
1862 | \r | |
1863 | # Initialize a dictionary for each arch type\r | |
1864 | if self.Arch not in GlobalData.gBuildHashSkipTracking:\r | |
1865 | GlobalData.gBuildHashSkipTracking[self.Arch] = dict()\r | |
1866 | \r | |
1867 | # If library or Module is binary do not skip by hash\r | |
1868 | if self.IsBinaryModule:\r | |
1869 | return False\r | |
1870 | \r | |
1871 | # .inc is contains binary information so do not skip by hash as well\r | |
1872 | for f_ext in self.SourceFileList:\r | |
1873 | if '.inc' in str(f_ext):\r | |
1874 | return False\r | |
1875 | \r | |
1876 | # Use Cache, if exists and if Module has a copy in cache\r | |
1877 | if GlobalData.gBinCacheSource and self.AttemptModuleCacheCopy():\r | |
1878 | return True\r | |
1879 | \r | |
1880 | # Early exit for libraries that haven't yet finished building\r | |
1881 | HashFile = path.join(self.BuildDir, self.Name + ".hash")\r | |
1882 | if self.IsLibrary and not os.path.exists(HashFile):\r | |
1883 | return False\r | |
1884 | \r | |
1885 | # Return a Boolean based on if can skip by hash, either from memory or from IO.\r | |
1886 | if self.Name not in GlobalData.gBuildHashSkipTracking[self.Arch]:\r | |
1887 | # If hashes are the same, SaveFileOnChange() will return False.\r | |
1888 | GlobalData.gBuildHashSkipTracking[self.Arch][self.Name] = not SaveFileOnChange(HashFile, self.GenModuleHash(), True)\r | |
1889 | return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]\r | |
1890 | else:\r | |
1891 | return GlobalData.gBuildHashSkipTracking[self.Arch][self.Name]\r | |
1892 | \r | |
    ## Decide whether we can skip the ModuleAutoGen process
    #  If any source file is newer than the module than we cannot skip
    #
    def CanSkip(self):
        # Already proven skippable earlier in this build invocation.
        if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
            return True
        if not os.path.exists(self.TimeStampPath):
            return False
        #last creation time of the module (stat tuple index 8 == st_mtime)
        DstTimeStamp = os.stat(self.TimeStampPath)[8]

        # If the workspace-wide source timestamp is newer, nothing can be skipped.
        SrcTimeStamp = self.Workspace._SrcTimeStamp
        if SrcTimeStamp > DstTimeStamp:
            return False

        # Every file recorded in the timestamp file must still exist and must
        # not be newer than the timestamp file itself. File mtimes are memoized
        # in the class-level TimeDict to avoid repeated stat calls.
        with open(self.TimeStampPath,'r') as f:
            for source in f:
                source = source.rstrip('\n')
                if not os.path.exists(source):
                    return False
                if source not in ModuleAutoGen.TimeDict :
                    ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
                if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
                    return False
        # Remember the decision for the rest of this build invocation.
        GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
        return True
1919 | \r | |
1920 | @cached_property\r | |
1921 | def TimeStampPath(self):\r | |
1922 | return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')\r |