]> git.proxmox.com Git - mirror_edk2.git/blame - BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
BaseTools: enhance the CacheCopyFile method arg names
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / ModuleAutoGen.py
CommitLineData
e8449e1d
FB
1## @file\r
2# Create makefile for MS nmake and GNU make\r
3#\r
4# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>\r
5# SPDX-License-Identifier: BSD-2-Clause-Patent\r
6#\r
7from __future__ import absolute_import\r
8from AutoGen.AutoGen import AutoGen\r
9from Common.LongFilePathSupport import CopyLongFilePath\r
10from Common.BuildToolError import *\r
11from Common.DataType import *\r
12from Common.Misc import *\r
13from Common.StringUtils import NormPath,GetSplitList\r
14from collections import defaultdict\r
15from Workspace.WorkspaceCommon import OrderedListDict\r
16import os.path as path\r
17import copy\r
18import hashlib\r
19from . import InfSectionParser\r
20from . import GenC\r
21from . import GenMake\r
22from . import GenDepex\r
23from io import BytesIO\r
24from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile\r
25from Workspace.MetaFileCommentParser import UsageList\r
26from .GenPcdDb import CreatePcdDatabaseCode\r
27from Common.caching import cached_class_function\r
28from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r
0e7e7a26
SS
29from AutoGen.CacheIR import ModuleBuildCacheIR\r
30import json\r
94459080 31import tempfile\r
e8449e1d
FB
32\r
## Mapping Makefile type
# Tool-chain family -> make flavor used when emitting the module Makefile.
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
#
# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
# is the former use /I , the Latter used -I to specify include directories
#
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

## default file name for AutoGen
# %(module_name)s placeholders are filled with the module base name at use sites.
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
# INF specification version written into generated As-Built INF files.
gInfSpecVersion = "0x00010017"
51\r
#
# Match name = variable
#
# Raw strings: these patterns previously used plain string literals, so '\s'
# and '\w' were invalid string escapes (DeprecationWarning on Python 3.6+,
# SyntaxWarning and eventually an error on newer interpreters).
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
#
# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
#
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
61\r
#
# Template string to generic AsBuilt INF
#
# ${name} placeholders are substituted by TemplateString; ${BEGIN}...${END}
# delimit sections repeated once per supplied value (zero repeats drop the
# section). NOTE(review): column alignment of the [Defines] values is
# cosmetic only — INF parsing is whitespace-insensitive.
#
gAsBuiltInfHeaderString = TemplateString("""${header_comments}

# DO NOT EDIT
# FILE auto-generated

[Defines]
  INF_VERSION                    = ${module_inf_version}
  BASE_NAME                      = ${module_name}
  FILE_GUID                      = ${module_guid}
  MODULE_TYPE                    = ${module_module_type}${BEGIN}
  VERSION_STRING                 = ${module_version_string}${END}${BEGIN}
  PCD_IS_DRIVER                  = ${pcd_is_driver_string}${END}${BEGIN}
  UEFI_SPECIFICATION_VERSION     = ${module_uefi_specification_version}${END}${BEGIN}
  PI_SPECIFICATION_VERSION       = ${module_pi_specification_version}${END}${BEGIN}
  ENTRY_POINT                    = ${module_entry_point}${END}${BEGIN}
  UNLOAD_IMAGE                   = ${module_unload_image}${END}${BEGIN}
  CONSTRUCTOR                    = ${module_constructor}${END}${BEGIN}
  DESTRUCTOR                     = ${module_destructor}${END}${BEGIN}
  SHADOW                         = ${module_shadow}${END}${BEGIN}
  PCI_VENDOR_ID                  = ${module_pci_vendor_id}${END}${BEGIN}
  PCI_DEVICE_ID                  = ${module_pci_device_id}${END}${BEGIN}
  PCI_CLASS_CODE                 = ${module_pci_class_code}${END}${BEGIN}
  PCI_REVISION                   = ${module_pci_revision}${END}${BEGIN}
  BUILD_NUMBER                   = ${module_build_number}${END}${BEGIN}
  SPEC                           = ${module_spec}${END}${BEGIN}
  UEFI_HII_RESOURCE_SECTION      = ${module_uefi_hii_resource_section}${END}${BEGIN}
  MODULE_UNI_FILE                = ${module_uni_file}${END}

[Packages.${module_arch}]${BEGIN}
  ${package_item}${END}

[Binaries.${module_arch}]${BEGIN}
  ${binary_item}${END}

[PatchPcd.${module_arch}]${BEGIN}
  ${patchablepcd_item}
${END}

[Protocols.${module_arch}]${BEGIN}
  ${protocol_item}
${END}

[Ppis.${module_arch}]${BEGIN}
  ${ppi_item}
${END}

[Guids.${module_arch}]${BEGIN}
  ${guid_item}
${END}

[PcdEx.${module_arch}]${BEGIN}
  ${pcd_item}
${END}

[LibraryClasses.${module_arch}]
## @LIB_INSTANCES${BEGIN}
#  ${libraryclasses_item}${END}

${depexsection_item}

${userextension_tianocore_item}

${tail_comments}

[BuildOptions.${module_arch}]
## @AsBuilt${BEGIN}
##   ${flags_item}${END}
""")
#
# extend lists contained in a dictionary with lists stored in another dictionary
# if CopyToDict is not derived from DefaultDict(list) then this may raise exception
#
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    """Append every value list of CopyFromDict onto the matching list in CopyToDict."""
    for Key, ValueList in CopyFromDict.items():
        CopyToDict[Key].extend(ValueList)
140\r
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    """Join PathList into one path, create that directory, and return the path."""
    FullPath = path.join(*PathList)
    CreateDirectory(FullPath)
    return FullPath
146\r
147#\r
148# Convert string to C format array\r
149#\r
150def _ConvertStringToByteArray(Value):\r
151 Value = Value.strip()\r
152 if not Value:\r
153 return None\r
154 if Value[0] == '{':\r
155 if not Value.endswith('}'):\r
156 return None\r
157 Value = Value.replace(' ', '').replace('{', '').replace('}', '')\r
158 ValFields = Value.split(',')\r
159 try:\r
160 for Index in range(len(ValFields)):\r
161 ValFields[Index] = str(int(ValFields[Index], 0))\r
162 except ValueError:\r
163 return None\r
164 Value = '{' + ','.join(ValFields) + '}'\r
165 return Value\r
166\r
167 Unicode = False\r
168 if Value.startswith('L"'):\r
169 if not Value.endswith('"'):\r
170 return None\r
171 Value = Value[1:]\r
172 Unicode = True\r
173 elif not Value.startswith('"') or not Value.endswith('"'):\r
174 return None\r
175\r
176 Value = eval(Value) # translate escape character\r
177 NewValue = '{'\r
178 for Index in range(0, len(Value)):\r
179 if Unicode:\r
180 NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','\r
181 else:\r
182 NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','\r
183 Value = NewValue + '0}'\r
184 return Value\r
185\r
186## ModuleAutoGen class\r
187#\r
188# This class encapsules the AutoGen behaviors for the build tools. In addition to\r
189# the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according\r
190# to the [depex] section in module's inf file.\r
191#\r
192class ModuleAutoGen(AutoGen):\r
193 # call super().__init__ then call the worker function with different parameter count\r
194 def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
195 if not hasattr(self, "_Init"):\r
196 self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)\r
197 self._Init = True\r
198\r
    ## Cache the timestamps of metafiles of every module in a class attribute
    #
    # Shared by all ModuleAutoGen instances (class attribute, not per-instance).
    TimeDict = {}
202\r
203 def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
204# check if this module is employed by active platform\r
205 if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):\r
206 EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \\r
207 % (MetaFile, Arch))\r
208 return None\r
209 return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
210\r
    ## Initialize ModuleAutoGen
    #
    # @param      Workspace           EdkIIWorkspaceBuild object
    # @param      ModuleFile          The path of module file
    # @param      Target              Build target (DEBUG, RELEASE)
    # @param      Toolchain           Name of tool chain
    # @param      Arch                The arch the module supports
    # @param      PlatformFile        Platform meta-file
    # @param      DataPipe            Carrier of platform info ("P_Info") for this process
    #
    def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile, DataPipe):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)

        self.Workspace = Workspace
        self.WorkspaceDir = ""
        self.PlatformInfo = None
        self.DataPipe = DataPipe
        # Fills WorkspaceDir and PlatformInfo from the data pipe; must run
        # before anything below that dereferences self.PlatformInfo.
        self.__init_platform_info__()
        self.MetaFile = ModuleFile
        self.SourceDir = self.MetaFile.SubDir
        # Module source directory, kept relative to the workspace root.
        self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)

        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily

        # Generation state flags, flipped by the corresponding Create* steps.
        self.IsCodeFileCreated = False
        self.IsAsBuiltInfCreated = False
        self.DepexGenerated = False

        self.BuildDatabase = self.Workspace.BuildDatabase
        self.BuildRuleOrder = None
        self.BuildTime = 0

        # Per-kind comment collections harvested from the module meta-file.
        self._GuidComments = OrderedListDict()
        self._ProtocolComments = OrderedListDict()
        self._PpiComments = OrderedListDict()
        # Lazily built by _ApplyBuildRule (None means "not computed yet").
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None

        self.AutoGenDepSet = set()
        self.ReferenceModules = []
        self.ConstPcd = {}

        self.Makefile = None
        self.FileDependCache = {}
e8449e1d
FB
260\r
261 def __init_platform_info__(self):\r
262 pinfo = self.DataPipe.Get("P_Info")\r
e8449e1d
FB
263 self.WorkspaceDir = pinfo.get("WorkspaceDir")\r
264 self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)\r
    ## hash() operator of ModuleAutoGen
    #
    #  The module file path and arch string will be used to represent
    #  hash value of this object
    #
    #   @retval   int Hash value of the module file path and arch
    #
    @cached_class_function
    def __hash__(self):
        # (MetaFile, Arch) uniquely identifies a module build; cached since it never changes.
        return hash((self.MetaFile, self.Arch))
    def __repr__(self):
        # e.g. "Path/To/Module.inf [X64]"
        return "%s [%s]" % (self.MetaFile, self.Arch)
277\r
278 # Get FixedAtBuild Pcds of this Module\r
279 @cached_property\r
280 def FixedAtBuildPcds(self):\r
281 RetVal = []\r
282 for Pcd in self.ModulePcdList:\r
283 if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:\r
284 continue\r
285 if Pcd not in RetVal:\r
286 RetVal.append(Pcd)\r
287 return RetVal\r
288\r
289 @cached_property\r
290 def FixedVoidTypePcds(self):\r
291 RetVal = {}\r
292 for Pcd in self.FixedAtBuildPcds:\r
293 if Pcd.DatumType == TAB_VOID:\r
294 if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:\r
295 RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue\r
296 return RetVal\r
297\r
298 @property\r
299 def UniqueBaseName(self):\r
300 ModuleNames = self.DataPipe.Get("M_Name")\r
301 if not ModuleNames:\r
302 return self.Name\r
76e12fa3 303 return ModuleNames.get((self.Name,self.MetaFile),self.Name)\r
e8449e1d
FB
304\r
    # Macros could be used in build_rule.txt (also Makefile)
    @cached_property
    def Macros(self):
        # Ordered so that Makefile emission is deterministic; note the aliases
        # (TOOLCHAIN/TOOLCHAIN_TAG/TOOL_CHAIN_TAG, MODULE_RELATIVE_DIR/MODULE_DIR).
        return OrderedDict((
            ("WORKSPACE", self.WorkspaceDir),
            ("MODULE_NAME", self.Name),
            ("MODULE_NAME_GUID", self.UniqueBaseName),
            ("MODULE_GUID", self.Guid),
            ("MODULE_VERSION", self.Version),
            ("MODULE_TYPE", self.ModuleType),
            ("MODULE_FILE", str(self.MetaFile)),
            ("MODULE_FILE_BASE_NAME", self.MetaFile.BaseName),
            ("MODULE_RELATIVE_DIR", self.SourceDir),
            ("MODULE_DIR", self.SourceDir),
            ("BASE_NAME", self.Name),
            ("ARCH", self.Arch),
            ("TOOLCHAIN", self.ToolChain),
            ("TOOLCHAIN_TAG", self.ToolChain),
            ("TOOL_CHAIN_TAG", self.ToolChain),
            ("TARGET", self.BuildTarget),
            ("BUILD_DIR", self.PlatformInfo.BuildDir),
            ("BIN_DIR", os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("LIB_DIR", os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("MODULE_BUILD_DIR", self.BuildDir),
            ("OUTPUT_DIR", self.OutputDir),
            ("DEBUG_DIR", self.DebugDir),
            ("DEST_DIR_OUTPUT", self.OutputDir),
            ("DEST_DIR_DEBUG", self.DebugDir),
            ("PLATFORM_NAME", self.PlatformInfo.Name),
            ("PLATFORM_GUID", self.PlatformInfo.Guid),
            ("PLATFORM_VERSION", self.PlatformInfo.Version),
            ("PLATFORM_RELATIVE_DIR", self.PlatformInfo.SourceDir),
            ("PLATFORM_DIR", mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
            ("PLATFORM_OUTPUT_DIR", self.PlatformInfo.OutputDir),
            ("FFS_OUTPUT_DIR", self.FfsOutputDir)
            ))
341\r
    ## Return the module build data object
    @cached_property
    def Module(self):
        # Keyed lookup into the workspace build database for this exact build flavor.
        return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]

    ## Return the module name
    @cached_property
    def Name(self):
        return self.Module.BaseName

    ## Return the module DxsFile if exist
    @cached_property
    def DxsFile(self):
        return self.Module.DxsFile
356\r
357 ## Return the module meta-file GUID\r
358 @cached_property\r
359 def Guid(self):\r
360 #\r
361 # To build same module more than once, the module path with FILE_GUID overridden has\r
362 # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path\r
363 # in DSC. The overridden GUID can be retrieved from file name\r
364 #\r
365 if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):\r
366 #\r
367 # Length of GUID is 36\r
368 #\r
369 return os.path.basename(self.MetaFile.Path)[:36]\r
370 return self.Module.Guid\r
371\r
    ## Return the module version
    @cached_property
    def Version(self):
        return self.Module.Version

    ## Return the module type
    @cached_property
    def ModuleType(self):
        return self.Module.ModuleType

    ## Return the component type (for Edk.x style of module)
    @cached_property
    def ComponentType(self):
        return self.Module.ComponentType

    ## Return the build type
    @cached_property
    def BuildType(self):
        return self.Module.BuildType

    ## Return the PCD_IS_DRIVER setting
    @cached_property
    def PcdIsDriver(self):
        return self.Module.PcdIsDriver

    ## Return the autogen version, i.e. module meta-file version
    @cached_property
    def AutoGenVersion(self):
        return self.Module.AutoGenVersion

    ## Check if the module is library or not
    @cached_property
    def IsLibrary(self):
        # A module with any LIBRARY_CLASS declaration is treated as a library.
        return bool(self.Module.LibraryClass)

    ## Check if the module is binary module or not
    @cached_property
    def IsBinaryModule(self):
        return self.Module.IsBinaryModule
411\r
412 ## Return the directory to store intermediate files of the module\r
413 @cached_property\r
414 def BuildDir(self):\r
415 return _MakeDir((\r
416 self.PlatformInfo.BuildDir,\r
417 self.Arch,\r
418 self.SourceDir,\r
419 self.MetaFile.BaseName\r
420 ))\r
421\r
    ## Return the directory to store the intermediate object files of the module
    @cached_property
    def OutputDir(self):
        # Created on first access, under the module build directory.
        return _MakeDir((self.BuildDir, "OUTPUT"))
426\r
427 ## Return the directory path to store ffs file\r
428 @cached_property\r
429 def FfsOutputDir(self):\r
430 if GlobalData.gFdfParser:\r
431 return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
432 return ''\r
433\r
    ## Return the directory to store auto-gened source files of the module
    @cached_property
    def DebugDir(self):
        # Created on first access, under the module build directory.
        return _MakeDir((self.BuildDir, "DEBUG"))
438\r
439 ## Return the path of custom file\r
440 @cached_property\r
441 def CustomMakefile(self):\r
442 RetVal = {}\r
443 for Type in self.Module.CustomMakefile:\r
444 MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'\r
445 File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r
446 RetVal[MakeType] = File\r
447 return RetVal\r
448\r
    ## Return the directory of the makefile
    #
    #   @retval     string  The directory string of module's makefile
    #
    @cached_property
    def MakeFileDir(self):
        # The module Makefile lives directly in the module build directory.
        return self.BuildDir

    ## Return build command string
    #
    #   @retval     string  Build command string
    #
    @cached_property
    def BuildCommand(self):
        # Delegated to the platform: all modules of a platform share one make tool.
        return self.PlatformInfo.BuildCommand
464\r
bf1ea933
FZ
465 ## Get Module package and Platform package\r
466 #\r
467 # @retval list The list of package object\r
468 #\r
469 @cached_property\r
470 def PackageList(self):\r
471 PkagList = []\r
472 if self.Module.Packages:\r
473 PkagList.extend(self.Module.Packages)\r
474 Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
475 for Package in Platform.Packages:\r
476 if Package in PkagList:\r
477 continue\r
478 PkagList.append(Package)\r
479 return PkagList\r
480\r
481 ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on\r
e8449e1d
FB
482 #\r
483 # @retval list The list of package object\r
484 #\r
485 @cached_property\r
486 def DerivedPackageList(self):\r
487 PackageList = []\r
bf1ea933
FZ
488 PackageList.extend(self.PackageList)\r
489 for M in self.DependentLibraryList:\r
e8449e1d
FB
490 for Package in M.Packages:\r
491 if Package in PackageList:\r
492 continue\r
493 PackageList.append(Package)\r
494 return PackageList\r
495\r
    ## Get the depex string
    #
    # Collects every [Depex] section of this module and its dependent
    # libraries and renders them as As-Built INF section text.
    #
    # @return : a string contain all depex expression.
    def _GetDepexExpresionString(self):
        DepexStr = ''
        DepexList = []
        ## DPX_SOURCE IN Define section.
        # A DXS source file supersedes [Depex] sections entirely.
        if self.Module.DxsFile:
            return DepexStr
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            DepexExpressionList = InfObj.GetDepexExpresionList()
            for DepexExpression in DepexExpressionList:
                for key in DepexExpression:
                    Arch, ModuleType = key
                    # Drop comment lines from the raw section content.
                    DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
                    # the type of build module is USER_DEFINED.
                    # All different DEPEX section tags would be copied into the As Built INF file
                    # and there would be separate DEPEX section tags
                    if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
                        if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
                    else:
                        if Arch.upper() == TAB_ARCH_COMMON or \
                           (Arch.upper() == self.Arch.upper() and \
                           ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
                            DepexList.append({(Arch, ModuleType): DepexExpr})

        #the type of build module is USER_DEFINED.
        # Each collected section is emitted verbatim under its own tag.
        if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
            for Depex in DepexList:
                for key in Depex:
                    DepexStr += '[Depex.%s.%s]\n' % key
                    DepexStr += '\n'.join('# '+ val for val in Depex[key])
                    DepexStr += '\n\n'
            if not DepexStr:
                return '[Depex.%s]\n' % self.Arch
            return DepexStr

        #the type of build module not is USER_DEFINED.
        # All sections are merged into one parenthesized AND expression.
        Count = 0
        for Depex in DepexList:
            Count += 1
            if DepexStr != '':
                DepexStr += ' AND '
            DepexStr += '('
            for D in Depex.values():
                DepexStr += ' '.join(val for val in D)
            # Drop a trailing END opcode before closing the parenthesis.
            Index = DepexStr.find('END')
            if Index > -1 and Index == len(DepexStr) - 3:
                DepexStr = DepexStr[:-3]
            DepexStr = DepexStr.strip()
            DepexStr += ')'
        if Count == 1:
            # A single expression needs no enclosing parentheses.
            DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
        if not DepexStr:
            return '[Depex.%s]\n' % self.Arch
        return '[Depex.%s]\n# ' % self.Arch + DepexStr
555\r
    ## Merge dependency expression
    #
    #   @retval     list    The token list of the dependency expression after parsed
    #
    @cached_property
    def DepexList(self):
        # No depex when the module uses a DXS file, is a library, or ships a
        # pre-built dependency-expression binary.
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexList = []
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
        #
        # Gather VOID* FixedAtBuild PCD values first so Guid.Token references
        # in depex sections can be replaced by their GUID byte-array values.
        FixedVoidTypePcds = {}
        for M in [self] + self.LibraryAutoGenList:
            FixedVoidTypePcds.update(M.FixedVoidTypePcds)
        for M in [self] + self.LibraryAutoGenList:
            Inherited = False
            for D in M.Module.Depex[self.Arch, self.ModuleType]:
                if DepexList != []:
                    DepexList.append('AND')
                DepexList.append('(')
                #replace D with value if D is FixedAtBuild PCD
                NewList = []
                for item in D:
                    if '.' not in item:
                        NewList.append(item)
                    else:
                        # 'Guid.Token' must resolve to a 16-byte VOID* FixedAtBuild
                        # PCD (a GUID expressed as a byte array).
                        try:
                            Value = FixedVoidTypePcds[item]
                            if len(Value.split(',')) != 16:
                                EdkLogger.error("build", FORMAT_INVALID,
                                                "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
                            NewList.append(Value)
                        except:
                            EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))

                DepexList.extend(NewList)
                if DepexList[-1] == 'END':  # no need of a END at this time
                    DepexList.pop()
                DepexList.append(')')
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
            # BEFORE/AFTER ordering opcodes terminate the merge.
            if 'BEFORE' in DepexList or 'AFTER' in DepexList:
                break
        if len(DepexList) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexList}
605\r
606 ## Merge dependency expression\r
607 #\r
608 # @retval list The token list of the dependency expression after parsed\r
609 #\r
610 @cached_property\r
611 def DepexExpressionDict(self):\r
612 if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:\r
613 return {}\r
614\r
615 DepexExpressionString = ''\r
616 #\r
617 # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion\r
618 #\r
619 for M in [self.Module] + self.DependentLibraryList:\r
620 Inherited = False\r
621 for D in M.DepexExpression[self.Arch, self.ModuleType]:\r
622 if DepexExpressionString != '':\r
623 DepexExpressionString += ' AND '\r
624 DepexExpressionString += '('\r
625 DepexExpressionString += D\r
626 DepexExpressionString = DepexExpressionString.rstrip('END').strip()\r
627 DepexExpressionString += ')'\r
628 Inherited = True\r
629 if Inherited:\r
630 EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))\r
631 if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:\r
632 break\r
633 if len(DepexExpressionString) > 0:\r
634 EdkLogger.verbose('')\r
635\r
636 return {self.ModuleType:DepexExpressionString}\r
637\r
    # Get the tiano core user extension, it is contain dependent library.
    # @retval: a list contain tiano core userextension.
    #
    def _GetTianoCoreUserExtensionList(self):
        TianoCoreUserExtentionList = []
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
            for TianoCoreUserExtent in TianoCoreUserExtenList:
                for Section in TianoCoreUserExtent:
                    # Section tag format: UserExtensions.TianoCore."..."[.<arch>];
                    # an explicit arch (4th dotted item) restricts the section.
                    ItemList = Section.split(TAB_SPLIT)
                    Arch = self.Arch
                    if len(ItemList) == 4:
                        Arch = ItemList[3]
                    if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
                        # Re-emit the section header followed by its content lines.
                        TianoCoreList = []
                        TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
                        TianoCoreList.extend(TianoCoreUserExtent[Section][:])
                        TianoCoreList.append('\n')
                        TianoCoreUserExtentionList.append(TianoCoreList)

        return TianoCoreUserExtentionList
661\r
    ## Return the list of specification version required for the module
    #
    #   @retval     list    The list of specification defined in module file
    #
    @cached_property
    def Specification(self):
        return self.Module.Specification
669\r
    ## Tool option for the module build
    #
    #   @param      PlatformInfo    The object of PlatformBuildInfo
    #   @retval     dict            The dict containing valid options
    #
    @cached_property
    def BuildOption(self):
        # NOTE(side effect): also populates self.BuildRuleOrder; several callers
        # evaluate `self.BuildOption` purely to trigger that.
        RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
        if self.BuildRuleOrder:
            # Convert the space-separated extension list into ['.ext', ...] form.
            self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
        return RetVal
681\r
682 ## Get include path list from tool option for the module build\r
683 #\r
684 # @retval list The include path list\r
685 #\r
686 @cached_property\r
687 def BuildOptionIncPathList(self):\r
688 #\r
689 # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT\r
690 # is the former use /I , the Latter used -I to specify include directories\r
691 #\r
692 if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):\r
693 BuildOptIncludeRegEx = gBuildOptIncludePatternMsft\r
694 elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):\r
695 BuildOptIncludeRegEx = gBuildOptIncludePatternOther\r
696 else:\r
697 #\r
698 # New ToolChainFamily, don't known whether there is option to specify include directories\r
699 #\r
700 return []\r
701\r
702 RetVal = []\r
703 for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):\r
704 try:\r
705 FlagOption = self.BuildOption[Tool]['FLAGS']\r
706 except KeyError:\r
707 FlagOption = ''\r
708\r
709 if self.ToolChainFamily != 'RVCT':\r
710 IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]\r
711 else:\r
712 #\r
713 # RVCT may specify a list of directory seperated by commas\r
714 #\r
715 IncPathList = []\r
716 for Path in BuildOptIncludeRegEx.findall(FlagOption):\r
717 PathList = GetSplitList(Path, TAB_COMMA_SPLIT)\r
718 IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)\r
719\r
720 #\r
721 # EDK II modules must not reference header files outside of the packages they depend on or\r
722 # within the module's directory tree. Report error if violation.\r
723 #\r
724 if GlobalData.gDisableIncludePathCheck == False:\r
725 for Path in IncPathList:\r
726 if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):\r
727 ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)\r
728 EdkLogger.error("build",\r
729 PARAMETER_INVALID,\r
730 ExtraData=ErrMsg,\r
731 File=str(self.MetaFile))\r
732 RetVal += IncPathList\r
733 return RetVal\r
734\r
    ## Return a list of files which can be built from source
    #
    #  What kind of files can be built is determined by build rules in
    #  $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
    #
    # NOTE(side effects): prepends source dirs to self.IncludePathList and
    # applies build rules to every selected file.
    #
    @cached_property
    def SourceFileList(self):
        RetVal = []
        # Empty tag/family entries match any toolchain.
        ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
        ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
        for F in self.Module.Sources:
            # match tool chain
            if F.TagName not in ToolChainTagSet:
                EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
                continue
            # match tool chain family or build rule family
            if F.ToolChainFamily not in ToolChainFamilySet:
                EdkLogger.debug(
                        EdkLogger.DEBUG_0,
                        "The file [%s] must be built by tools of [%s], " \
                        "but current toolchain family is [%s], buildrule family is [%s]" \
                            % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
                continue

            # add the file path into search path list for file including
            if F.Dir not in self.IncludePathList:
                self.IncludePathList.insert(0, F.Dir)
            RetVal.append(F)

        # Drop lower-priority duplicates that share a base name (in place).
        self._MatchBuildRuleOrder(RetVal)

        for F in RetVal:
            self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return RetVal
770\r
771 def _MatchBuildRuleOrder(self, FileList):\r
772 Order_Dict = {}\r
773 self.BuildOption\r
774 for SingleFile in FileList:\r
775 if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:\r
776 key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]\r
777 if key in Order_Dict:\r
778 Order_Dict[key].append(SingleFile.Ext)\r
779 else:\r
780 Order_Dict[key] = [SingleFile.Ext]\r
781\r
782 RemoveList = []\r
783 for F in Order_Dict:\r
784 if len(Order_Dict[F]) > 1:\r
785 Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))\r
786 for Ext in Order_Dict[F][1:]:\r
787 RemoveList.append(F + Ext)\r
788\r
789 for item in RemoveList:\r
790 FileList.remove(item)\r
791\r
792 return FileList\r
793\r
    ## Return the list of unicode files
    @cached_property
    def UnicodeFileList(self):
        return self.FileTypes.get(TAB_UNICODE_FILE,[])

    ## Return the list of vfr files
    @cached_property
    def VfrFileList(self):
        return self.FileTypes.get(TAB_VFR_FILE, [])

    ## Return the list of Image Definition files
    @cached_property
    def IdfFileList(self):
        return self.FileTypes.get(TAB_IMAGE_FILE,[])
808\r
809 ## Return a list of files which can be built from binary\r
810 #\r
811 # "Build" binary files are just to copy them to build directory.\r
812 #\r
813 # @retval list The list of files which can be built later\r
814 #\r
815 @cached_property\r
816 def BinaryFileList(self):\r
817 RetVal = []\r
818 for F in self.Module.Binaries:\r
819 if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:\r
820 continue\r
821 RetVal.append(F)\r
822 self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)\r
823 return RetVal\r
824\r
825 @cached_property\r
826 def BuildRules(self):\r
827 RetVal = {}\r
828 BuildRuleDatabase = self.PlatformInfo.BuildRule\r
829 for Type in BuildRuleDatabase.FileTypeList:\r
830 #first try getting build rule by BuildRuleFamily\r
831 RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]\r
832 if not RuleObject:\r
833 # build type is always module type, but ...\r
834 if self.ModuleType != self.BuildType:\r
835 RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]\r
836 #second try getting build rule by ToolChainFamily\r
837 if not RuleObject:\r
838 RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]\r
839 if not RuleObject:\r
840 # build type is always module type, but ...\r
841 if self.ModuleType != self.BuildType:\r
842 RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]\r
843 if not RuleObject:\r
844 continue\r
845 RuleObject = RuleObject.Instantiate(self.Macros)\r
846 RetVal[Type] = RuleObject\r
847 for Ext in RuleObject.SourceFileExtList:\r
848 RetVal[Ext] = RuleObject\r
849 return RetVal\r
850\r
    def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
        """Chain build rules for File, accumulating intro/intermediate/final
        build targets into the class's lazily-created target sets."""
        # Lazily create the target bookkeeping on first use.
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        if not BinaryFileList:
            BinaryFileList = self.BinaryFileList

        SubDirectory = os.path.join(self.OutputDir, File.SubDir)
        if not os.path.exists(SubDirectory):
            CreateDirectory(SubDirectory)
        LastTarget = None
        RuleChain = set()
        # Worklist: outputs of each applied rule are appended and processed in turn.
        SourceList = [File]
        Index = 0
        #
        # Make sure to get build rule order value
        #
        self.BuildOption

        while Index < len(SourceList):
            Source = SourceList[Index]
            Index = Index + 1

            if Source != File:
                CreateDirectory(Source.Dir)

            if File.IsBinary and File == Source and File in BinaryFileList:
                # Skip all files that are not binary libraries
                if not self.IsLibrary:
                    continue
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            elif FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            else:
                # stop at no more rules
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            FileType = RuleObject.SourceFileType
            self._FileTypes[FileType].add(Source)

            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            Target = RuleObject.Apply(Source, self.BuildRuleOrder)
            if not Target:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break
            elif not Target.Outputs:
                # Only do build for target with outputs
                self._FinalBuildTargetList.add(Target)

            self._BuildTargets[FileType].add(Target)

            if not Source.IsBinary and Source == File:
                # First rule applied to the original source file.
                self._IntroBuildTargetList.add(Target)

            # to avoid cyclic rule
            if FileType in RuleChain:
                break

            RuleChain.add(FileType)
            SourceList.extend(Target.Outputs)
            LastTarget = Target
            # Subsequent iterations re-detect the type from the rule/extension.
            FileType = TAB_UNKNOWN_FILE
926\r
927 @cached_property\r
928 def Targets(self):\r
929 if self._BuildTargets is None:\r
930 self._IntroBuildTargetList = set()\r
931 self._FinalBuildTargetList = set()\r
932 self._BuildTargets = defaultdict(set)\r
933 self._FileTypes = defaultdict(set)\r
934\r
935 #TRICK: call SourceFileList property to apply build rule for source files\r
936 self.SourceFileList\r
937\r
938 #TRICK: call _GetBinaryFileList to apply build rule for binary files\r
939 self.BinaryFileList\r
940\r
941 return self._BuildTargets\r
942\r
943 @cached_property\r
944 def IntroTargetList(self):\r
945 self.Targets\r
946 return self._IntroBuildTargetList\r
947\r
948 @cached_property\r
949 def CodaTargetList(self):\r
950 self.Targets\r
951 return self._FinalBuildTargetList\r
952\r
953 @cached_property\r
954 def FileTypes(self):\r
955 self.Targets\r
956 return self._FileTypes\r
957\r
bf1ea933 958 ## Get the list of package object the module depends on and the Platform depends on\r
e8449e1d
FB
959 #\r
960 # @retval list The package object list\r
961 #\r
962 @cached_property\r
963 def DependentPackageList(self):\r
bf1ea933 964 return self.PackageList\r
e8449e1d
FB
965\r
    ## Return the list of auto-generated code file
    #
    #   Runs GenC.CreateCode and registers each non-empty generated artifact
    #   (AutoGen.c/.h, string header, UNI/IDF binaries) with the build rules.
    #
    #   @retval dict    {PathClass of generated file : its content (str or bytes)}
    #
    @cached_property
    def AutoGenFileList(self):
        # Flag passed to GenC.CreateCode: UNI/IDF generation is disabled for
        # the 'UEFI_HII' build type.
        AutoGenUniIdf = self.BuildType != 'UEFI_HII'
        UniStringBinBuffer = BytesIO()
        IdfGenBinBuffer = BytesIO()
        RetVal = {}
        AutoGenC = TemplateString()
        AutoGenH = TemplateString()
        StringH = TemplateString()
        StringIdf = TemplateString()
        GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
        #
        # AutoGen.c is generated if there are library classes in inf, or there are object files
        #
        if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
                                    or TAB_OBJECT_FILE in self.FileTypes):
            AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenC)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(AutoGenH) != "":
            AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(StringH) != "":
            AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        # UNI string package: binary content goes to OutputDir, marked IsBinary.
        if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = UniStringBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if UniStringBinBuffer is not None:
            UniStringBinBuffer.close()
        if str(StringIdf) != "":
            AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringIdf)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        # IDF image package: binary content goes to OutputDir, marked IsBinary.
        if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if IdfGenBinBuffer is not None:
            IdfGenBinBuffer.close()
        return RetVal
1016\r
1017 ## Return the list of library modules explicitly or implicitly used by this module\r
1018 @cached_property\r
1019 def DependentLibraryList(self):\r
1020 # only merge library classes and PCD for non-library module\r
1021 if self.IsLibrary:\r
1022 return []\r
1023 return self.PlatformInfo.ApplyLibraryInstance(self.Module)\r
1024\r
1025 ## Get the list of PCDs from current module\r
1026 #\r
1027 # @retval list The list of PCD\r
1028 #\r
1029 @cached_property\r
1030 def ModulePcdList(self):\r
1031 # apply PCD settings from platform\r
1032 RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)\r
1033\r
1034 return RetVal\r
1035 @cached_property\r
1036 def _PcdComments(self):\r
1037 ReVal = OrderedListDict()\r
1038 ExtendCopyDictionaryLists(ReVal, self.Module.PcdComments)\r
1039 if not self.IsLibrary:\r
1040 for Library in self.DependentLibraryList:\r
1041 ExtendCopyDictionaryLists(ReVal, Library.PcdComments)\r
1042 return ReVal\r
1043\r
1044 ## Get the list of PCDs from dependent libraries\r
1045 #\r
1046 # @retval list The list of PCD\r
1047 #\r
1048 @cached_property\r
1049 def LibraryPcdList(self):\r
1050 if self.IsLibrary:\r
1051 return []\r
1052 RetVal = []\r
1053 Pcds = set()\r
1054 # get PCDs from dependent libraries\r
1055 for Library in self.DependentLibraryList:\r
1056 PcdsInLibrary = OrderedDict()\r
1057 for Key in Library.Pcds:\r
1058 # skip duplicated PCDs\r
1059 if Key in self.Module.Pcds or Key in Pcds:\r
1060 continue\r
1061 Pcds.add(Key)\r
1062 PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])\r
1063 RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))\r
1064 return RetVal\r
1065\r
1066 ## Get the GUID value mapping\r
1067 #\r
1068 # @retval dict The mapping between GUID cname and its value\r
1069 #\r
1070 @cached_property\r
1071 def GuidList(self):\r
1072 RetVal = self.Module.Guids\r
1073 for Library in self.DependentLibraryList:\r
1074 RetVal.update(Library.Guids)\r
1075 ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)\r
1076 ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)\r
1077 return RetVal\r
1078\r
1079 @cached_property\r
1080 def GetGuidsUsedByPcd(self):\r
1081 RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())\r
1082 for Library in self.DependentLibraryList:\r
1083 RetVal.update(Library.GetGuidsUsedByPcd())\r
1084 return RetVal\r
1085 ## Get the protocol value mapping\r
1086 #\r
1087 # @retval dict The mapping between protocol cname and its value\r
1088 #\r
1089 @cached_property\r
1090 def ProtocolList(self):\r
1091 RetVal = OrderedDict(self.Module.Protocols)\r
1092 for Library in self.DependentLibraryList:\r
1093 RetVal.update(Library.Protocols)\r
1094 ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)\r
1095 ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)\r
1096 return RetVal\r
1097\r
1098 ## Get the PPI value mapping\r
1099 #\r
1100 # @retval dict The mapping between PPI cname and its value\r
1101 #\r
1102 @cached_property\r
1103 def PpiList(self):\r
1104 RetVal = OrderedDict(self.Module.Ppis)\r
1105 for Library in self.DependentLibraryList:\r
1106 RetVal.update(Library.Ppis)\r
1107 ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)\r
1108 ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)\r
1109 return RetVal\r
1110\r
    ## Get the list of include search path
    #
    #   Order matters: module dir and debug dir first, then each dependent
    #   package's dir and includes, then /I and -I paths from build options.
    #
    #   @retval list    The list path
    #
    @cached_property
    def IncludePathList(self):
        RetVal = []
        RetVal.append(self.MetaFile.Dir)
        RetVal.append(self.DebugDir)

        for Package in self.PackageList:
            PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
            if PackageDir not in RetVal:
                RetVal.append(PackageDir)
            IncludesList = Package.Includes
            if Package._PrivateIncludes:
                # Private includes are only visible to modules located inside
                # the package's own directory tree.
                if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
                    IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
            for Inc in IncludesList:
                if Inc not in RetVal:
                    RetVal.append(str(Inc))
        # Also honor include paths given directly in build option FLAGS.
        RetVal.extend(self.IncPathFromBuildOptions)

        return RetVal
1134\r
0c3e8e99
BF
1135 @cached_property\r
1136 def IncPathFromBuildOptions(self):\r
1137 IncPathList = []\r
1138 for tool in self.BuildOption:\r
1139 if 'FLAGS' in self.BuildOption[tool]:\r
1140 flags = self.BuildOption[tool]['FLAGS']\r
1141 whitespace = False\r
1142 for flag in flags.split(" "):\r
1143 flag = flag.strip()\r
1144 if flag.startswith(("/I","-I")):\r
1145 if len(flag)>2:\r
1146 if os.path.exists(flag[2:]):\r
1147 IncPathList.append(flag[2:])\r
1148 else:\r
1149 whitespace = True\r
1150 continue\r
1151 if whitespace and flag:\r
1152 if os.path.exists(flag):\r
1153 IncPathList.append(flag)\r
1154 whitespace = False\r
1155 return IncPathList\r
1156\r
e8449e1d
FB
1157 @cached_property\r
1158 def IncludePathLength(self):\r
1159 return sum(len(inc)+1 for inc in self.IncludePathList)\r
1160\r
82407bd1
RC
    ## Get the list of include paths from the packages
    #
    #   @IncludesList list  The list path
    #
    @cached_property
    def PackageIncludePathList(self):
        IncludesList = []
        for Package in self.PackageList:
            PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
            # NOTE(review): IncludesList is overwritten on every iteration, so
            # only the LAST package's includes are returned. Confirm whether
            # accumulation across all packages was intended.
            IncludesList = Package.Includes
            if Package._PrivateIncludes:
                # Private includes are hidden from modules outside the package tree.
                if not self.MetaFile.Path.startswith(PackageDir):
                    IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
        return IncludesList
1175\r
e8449e1d
FB
1176 ## Get HII EX PCDs which maybe used by VFR\r
1177 #\r
1178 # efivarstore used by VFR may relate with HII EX PCDs\r
1179 # Get the variable name and GUID from efivarstore and HII EX PCD\r
1180 # List the HII EX PCDs in As Built INF if both name and GUID match.\r
1181 #\r
1182 # @retval list HII EX PCDs\r
1183 #\r
1184 def _GetPcdsMaybeUsedByVfr(self):\r
1185 if not self.SourceFileList:\r
1186 return []\r
1187\r
1188 NameGuids = set()\r
1189 for SrcFile in self.SourceFileList:\r
1190 if SrcFile.Ext.lower() != '.vfr':\r
1191 continue\r
1192 Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')\r
1193 if not os.path.exists(Vfri):\r
1194 continue\r
1195 VfriFile = open(Vfri, 'r')\r
1196 Content = VfriFile.read()\r
1197 VfriFile.close()\r
1198 Pos = Content.find('efivarstore')\r
1199 while Pos != -1:\r
1200 #\r
1201 # Make sure 'efivarstore' is the start of efivarstore statement\r
1202 # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'\r
1203 #\r
1204 Index = Pos - 1\r
1205 while Index >= 0 and Content[Index] in ' \t\r\n':\r
1206 Index -= 1\r
1207 if Index >= 0 and Content[Index] != ';':\r
1208 Pos = Content.find('efivarstore', Pos + len('efivarstore'))\r
1209 continue\r
1210 #\r
1211 # 'efivarstore' must be followed by name and guid\r
1212 #\r
1213 Name = gEfiVarStoreNamePattern.search(Content, Pos)\r
1214 if not Name:\r
1215 break\r
1216 Guid = gEfiVarStoreGuidPattern.search(Content, Pos)\r
1217 if not Guid:\r
1218 break\r
1219 NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')\r
1220 NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))\r
1221 Pos = Content.find('efivarstore', Name.end())\r
1222 if not NameGuids:\r
1223 return []\r
1224 HiiExPcds = []\r
1225 for Pcd in self.PlatformInfo.Pcds.values():\r
1226 if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:\r
1227 continue\r
1228 for SkuInfo in Pcd.SkuInfoList.values():\r
1229 Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)\r
1230 if not Value:\r
1231 continue\r
1232 Name = _ConvertStringToByteArray(SkuInfo.VariableName)\r
1233 Guid = GuidStructureStringToGuidString(Value)\r
1234 if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:\r
1235 HiiExPcds.append(Pcd)\r
1236 break\r
1237\r
1238 return HiiExPcds\r
1239\r
1240 def _GenOffsetBin(self):\r
1241 VfrUniBaseName = {}\r
1242 for SourceFile in self.Module.Sources:\r
1243 if SourceFile.Type.upper() == ".VFR" :\r
1244 #\r
1245 # search the .map file to find the offset of vfr binary in the PE32+/TE file.\r
1246 #\r
1247 VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")\r
1248 elif SourceFile.Type.upper() == ".UNI" :\r
1249 #\r
1250 # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.\r
1251 #\r
1252 VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")\r
1253\r
1254 if not VfrUniBaseName:\r
1255 return None\r
1256 MapFileName = os.path.join(self.OutputDir, self.Name + ".map")\r
1257 EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")\r
1258 VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))\r
1259 if not VfrUniOffsetList:\r
1260 return None\r
1261\r
1262 OutputName = '%sOffset.bin' % self.Name\r
1263 UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)\r
1264\r
1265 try:\r
1266 fInputfile = open(UniVfrOffsetFileName, "wb+", 0)\r
1267 except:\r
1268 EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)\r
1269\r
1270 # Use a instance of BytesIO to cache data\r
1271 fStringIO = BytesIO()\r
1272\r
1273 for Item in VfrUniOffsetList:\r
1274 if (Item[0].find("Strings") != -1):\r
1275 #\r
1276 # UNI offset in image.\r
1277 # GUID + Offset\r
1278 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r
1279 #\r
1280 UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'\r
1281 fStringIO.write(UniGuid)\r
1282 UniValue = pack ('Q', int (Item[1], 16))\r
1283 fStringIO.write (UniValue)\r
1284 else:\r
1285 #\r
1286 # VFR binary offset in image.\r
1287 # GUID + Offset\r
1288 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r
1289 #\r
1290 VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'\r
1291 fStringIO.write(VfrGuid)\r
1292 VfrValue = pack ('Q', int (Item[1], 16))\r
1293 fStringIO.write (VfrValue)\r
1294 #\r
1295 # write data into file.\r
1296 #\r
1297 try :\r
1298 fInputfile.write (fStringIO.getvalue())\r
1299 except:\r
1300 EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "\r
1301 "file been locked or using by other applications." %UniVfrOffsetFileName, None)\r
1302\r
1303 fStringIO.close ()\r
1304 fInputfile.close ()\r
1305 return OutputName\r
d01a9986 1306\r
e8449e1d
FB
1307 @cached_property\r
1308 def OutputFile(self):\r
1309 retVal = set()\r
40db176d 1310\r
91f6c533 1311 for Root, Dirs, Files in os.walk(self.BuildDir):\r
e8449e1d 1312 for File in Files:\r
40db176d 1313 # lib file is already added through above CodaTargetList, skip it here\r
91f6c533
SS
1314 if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):\r
1315 NewFile = path.join(Root, File)\r
40db176d 1316 retVal.add(NewFile)\r
e8449e1d 1317\r
40db176d 1318 for Root, Dirs, Files in os.walk(self.FfsOutputDir):\r
d01a9986 1319 for File in Files:\r
91f6c533 1320 NewFile = path.join(Root, File)\r
40db176d 1321 retVal.add(NewFile)\r
d01a9986 1322\r
e8449e1d
FB
1323 return retVal\r
1324\r
    ## Create AsBuilt INF file the module
    #
    #  Generates <Name>.inf in OutputDir describing the module as built:
    #  binaries, patchable/dynamic-ex PCDs, protocols/PPIs/GUIDs, build
    #  flags, library classes, user extensions and depex. Only runs for
    #  non-library modules that have source files and no binary files.
    #
    def CreateAsBuiltInf(self):

        if self.IsAsBuiltInfCreated:
            return

        # Skip INF file generation for libraries
        if self.IsLibrary:
            return

        # Skip the following code for modules with no source files
        if not self.SourceFileList:
            return

        # Skip the following code for modules without any binary files
        if self.BinaryFileList:
            return

        ### TODO: How to handles mixed source and binary modules

        # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
        # Also find all packages that the DynamicEx PCDs depend on
        Pcds = []
        PatchablePcds = []
        Packages = []
        PcdCheckList = []
        PcdTokenSpaceList = []
        for Pcd in self.ModulePcdList + self.LibraryPcdList:
            if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
                PatchablePcds.append(Pcd)
                PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
            elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
                if Pcd not in Pcds:
                    Pcds.append(Pcd)
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
                    PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
        GuidList = OrderedDict(self.GuidList)
        for TokenSpace in self.GetGuidsUsedByPcd:
            # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
            # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
            if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
                GuidList.pop(TokenSpace)
        # Keep only packages that declare at least one referenced GUID/PPI/protocol/PCD.
        CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
        for Package in self.DerivedPackageList:
            if Package in Packages:
                continue
            BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
            Found = False
            for Index in range(len(BeChecked)):
                for Item in CheckList[Index]:
                    if Item in BeChecked[Index]:
                        Packages.append(Package)
                        Found = True
                        break
                if Found:
                    break

        # Also pull in packages declaring HII EX PCDs referenced from VFR.
        VfrPcds = self._GetPcdsMaybeUsedByVfr()
        for Pkg in self.PlatformInfo.PackageList:
            if Pkg in Packages:
                continue
            for VfrPcd in VfrPcds:
                if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
                    (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
                    Packages.append(Pkg)
                    break

        ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
        DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
        Guid = self.Guid
        MDefs = self.Module.Defines

        AsBuiltInfDict = {
          'module_name'                       : self.Name,
          'module_guid'                       : Guid,
          'module_module_type'                : ModuleType,
          'module_version_string'             : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
          'pcd_is_driver_string'              : [],
          'module_uefi_specification_version' : [],
          'module_pi_specification_version'   : [],
          'module_entry_point'                : self.Module.ModuleEntryPointList,
          'module_unload_image'               : self.Module.ModuleUnloadImageList,
          'module_constructor'                : self.Module.ConstructorList,
          'module_destructor'                 : self.Module.DestructorList,
          'module_shadow'                     : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
          'module_pci_vendor_id'              : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
          'module_pci_device_id'              : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
          'module_pci_class_code'             : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
          'module_pci_revision'               : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
          'module_build_number'               : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
          'module_spec'                       : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
          'module_uefi_hii_resource_section'  : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
          'module_uni_file'                   : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
          'module_arch'                       : self.Arch,
          'package_item'                      : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
          'binary_item'                       : [],
          'patchablepcd_item'                 : [],
          'pcd_item'                          : [],
          'protocol_item'                     : [],
          'ppi_item'                          : [],
          'guid_item'                         : [],
          'flags_item'                        : [],
          'libraryclasses_item'               : []
          }

        if 'MODULE_UNI_FILE' in MDefs:
            UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
            if os.path.isfile(UNIFile):
                shutil.copy2(UNIFile, self.OutputDir)

        if self.AutoGenVersion > int(gInfSpecVersion, 0):
            AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
        else:
            AsBuiltInfDict['module_inf_version'] = gInfSpecVersion

        if DriverType:
            AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)

        if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
        if 'PI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])

        # Emit each final build target as a [Binaries] entry, classified by extension.
        OutputDir = self.OutputDir.replace('\\', '/').strip('/')
        DebugDir = self.DebugDir.replace('\\', '/').strip('/')
        for Item in self.CodaTargetList:
            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
            if os.path.isabs(File):
                File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
            if Item.Target.Ext.lower() == '.aml':
                AsBuiltInfDict['binary_item'].append('ASL|' + File)
            elif Item.Target.Ext.lower() == '.acpi':
                AsBuiltInfDict['binary_item'].append('ACPI|' + File)
            elif Item.Target.Ext.lower() == '.efi':
                AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
            else:
                AsBuiltInfDict['binary_item'].append('BIN|' + File)
        if not self.DepexGenerated:
            DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
            if os.path.exists(DepexFile):
                self.DepexGenerated = True
        if self.DepexGenerated:
            if self.ModuleType in [SUP_MODULE_PEIM]:
                AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
                AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
                AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')

        Bin = self._GenOffsetBin()
        if Bin:
            AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)

        for Root, Dirs, Files in os.walk(OutputDir):
            for File in Files:
                if File.lower().endswith('.pdb'):
                    AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
        # Re-use the source header comments, replacing @BinaryHeader with @file.
        HeaderComments = self.Module.HeaderComments
        StartPos = 0
        for Index in range(len(HeaderComments)):
            if HeaderComments[Index].find('@BinaryHeader') != -1:
                HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
                StartPos = Index
                break
        AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
        AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)

        GenList = [
            (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
            (self.PpiList, self._PpiComments, 'ppi_item'),
            (GuidList, self._GuidComments, 'guid_item')
        ]
        for Item in GenList:
            for CName in Item[0]:
                Comments = '\n  '.join(Item[1][CName]) if CName in Item[1] else ''
                Entry = Comments + '\n  ' + CName if Comments else CName
                AsBuiltInfDict[Item[2]].append(Entry)
        PatchList = parsePcdInfoFromMapFile(
                            os.path.join(self.OutputDir, self.Name + '.map'),
                            os.path.join(self.OutputDir, self.Name + '.efi')
                        )
        if PatchList:
            for Pcd in PatchablePcds:
                TokenCName = Pcd.TokenCName
                for PcdItem in GlobalData.MixedPcd:
                    if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                        TokenCName = PcdItem[0]
                        break
                for PatchPcd in PatchList:
                    if TokenCName == PatchPcd[0]:
                        break
                else:
                    continue
                PcdValue = ''
                if Pcd.DatumType == 'BOOLEAN':
                    BoolValue = Pcd.DefaultValue.upper()
                    if BoolValue == 'TRUE':
                        Pcd.DefaultValue = '1'
                    elif BoolValue == 'FALSE':
                        Pcd.DefaultValue = '0'

                if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
                    HexFormat = '0x%02x'
                    if Pcd.DatumType == TAB_UINT16:
                        HexFormat = '0x%04x'
                    elif Pcd.DatumType == TAB_UINT32:
                        HexFormat = '0x%08x'
                    elif Pcd.DatumType == TAB_UINT64:
                        HexFormat = '0x%016x'
                    PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
                else:
                    if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
                        EdkLogger.error("build", AUTOGEN_ERROR,
                                        "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
                                        )
                    ArraySize = int(Pcd.MaxDatumSize, 0)
                    PcdValue = Pcd.DefaultValue
                    # Convert string/unicode defaults into a padded C byte-array
                    # literal sized to MaxDatumSize.
                    if PcdValue[0] != '{':
                        Unicode = False
                        if PcdValue[0] == 'L':
                            Unicode = True
                        PcdValue = PcdValue.lstrip('L')
                        PcdValue = eval(PcdValue)
                        NewValue = '{'
                        for Index in range(0, len(PcdValue)):
                            if Unicode:
                                CharVal = ord(PcdValue[Index])
                                NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
                                        + '0x%02x' % (CharVal >> 8) + ', '
                            else:
                                NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
                        Padding = '0x00, '
                        if Unicode:
                            Padding = Padding * 2
                            ArraySize = ArraySize // 2
                        if ArraySize < (len(PcdValue) + 1):
                            if Pcd.MaxSizeUserSet:
                                EdkLogger.error("build", AUTOGEN_ERROR,
                                                "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                                )
                            else:
                                ArraySize = len(PcdValue) + 1
                        if ArraySize > len(PcdValue) + 1:
                            NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
                        PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
                    elif len(PcdValue.split(',')) <= ArraySize:
                        PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
                        PcdValue += '}'
                    else:
                        if Pcd.MaxSizeUserSet:
                            EdkLogger.error("build", AUTOGEN_ERROR,
                                            "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                            )
                        else:
                            ArraySize = len(PcdValue) + 1
                PcdItem = '%s.%s|%s|0x%X' % \
                    (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
                PcdComments = ''
                if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                    PcdComments = '\n  '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
                if PcdComments:
                    PcdItem = PcdComments + '\n  ' + PcdItem
                AsBuiltInfDict['patchablepcd_item'].append(PcdItem)

        for Pcd in Pcds + VfrPcds:
            PcdCommentList = []
            HiiInfo = ''
            TokenCName = Pcd.TokenCName
            for PcdItem in GlobalData.MixedPcd:
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                    TokenCName = PcdItem[0]
                    break
            if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
                for SkuName in Pcd.SkuInfoList:
                    SkuInfo = Pcd.SkuInfoList[SkuName]
                    HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
                    break
            if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
            if HiiInfo:
                UsageIndex = -1
                UsageStr = ''
                for Index, Comment in enumerate(PcdCommentList):
                    for Usage in UsageList:
                        if Comment.find(Usage) != -1:
                            UsageStr = Usage
                            UsageIndex = Index
                            break
                if UsageIndex != -1:
                    PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
                else:
                    PcdCommentList.append('## UNDEFINED ' + HiiInfo)
            PcdComments = '\n  '.join(PcdCommentList)
            PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
            if PcdComments:
                PcdEntry = PcdComments + '\n  ' + PcdEntry
            AsBuiltInfDict['pcd_item'].append(PcdEntry)
        for Item in self.BuildOption:
            if 'FLAGS' in self.BuildOption[Item]:
                AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))

        # Generated LibraryClasses section in comments.
        for Library in self.LibraryAutoGenList:
            AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))

        # Generated UserExtensions TianoCore section.
        # All tianocore user extensions are copied.
        UserExtStr = ''
        for TianoCore in self._GetTianoCoreUserExtensionList():
            UserExtStr += '\n'.join(TianoCore)
            ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
            if os.path.isfile(ExtensionFile):
                shutil.copy2(ExtensionFile, self.OutputDir)
        AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr

        # Generated depex expression section in comments.
        DepexExpression = self._GetDepexExpresionString()
        AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''

        AsBuiltInf = TemplateString()
        AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))

        SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)

        self.IsAsBuiltInfCreated = True
1652\r
3bfbc915
SS
1653 def CacheCopyFile(self, DestDir, SourceDir, File):\r
1654 sub_dir = os.path.relpath(File, SourceDir)\r
1655 destination_file = os.path.join(DestDir, sub_dir)\r
0e7e7a26
SS
1656 destination_dir = os.path.dirname(destination_file)\r
1657 CreateDirectory(destination_dir)\r
1658 try:\r
1659 CopyFileOnChange(File, destination_dir)\r
1660 except:\r
1661 EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))\r
1662 return\r
1663\r
    ## Copy the module's build outputs (and FFS files) into the binary cache
    #
    #   @retval False when the required hashes cannot be generated;
    #           otherwise falls through after copying (no explicit return)
    #
    def CopyModuleToCache(self):
        self.GenPreMakefileHash(GlobalData.gCacheIR)
        if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
            return False

        self.GenMakeHash(GlobalData.gCacheIR)
        if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashChain or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
            return False

        # Cache entries are keyed by the make hash so different builds coexist.
        MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)
        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName, MakeHashStr)
        FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name, MakeHashStr)

        CreateDirectory (FileDir)
        self.SaveHashChainFileToCache(GlobalData.gCacheIR)
        ModuleFile = path.join(self.OutputDir, self.Name + '.inf')
        if os.path.exists(ModuleFile):
            CopyFileOnChange(ModuleFile, FileDir)
        if not self.OutputFile:
            Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
            self.OutputFile = Ma.Binaries
        for File in self.OutputFile:
            if os.path.exists(File):
                # FFS outputs keep their FfsOutputDir-relative layout; all
                # other files are stored relative to OutputDir.
                if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):
                    self.CacheCopyFile(FfsDir, self.FfsOutputDir, File)
                else:
                    self.CacheCopyFile(FileDir, self.OutputDir, File)
e8449e1d 1696\r
0e7e7a26
SS
1697 def SaveHashChainFileToCache(self, gDict):\r
1698 if not GlobalData.gBinCacheDest:\r
e8449e1d 1699 return False\r
0e7e7a26
SS
1700\r
1701 self.GenPreMakefileHash(gDict)\r
1702 if not (self.MetaFile.Path, self.Arch) in gDict or \\r
1703 not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
1704 EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
1705 return False\r
1706\r
1707 self.GenMakeHash(gDict)\r
1708 if not (self.MetaFile.Path, self.Arch) in gDict or \\r
1709 not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain or \\r
1710 not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
1711 EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
1712 return False\r
1713\r
1714 # save the hash chain list as cache file\r
1715 MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)\r
1716 CacheDestDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
1717 CacheHashDestDir = path.join(CacheDestDir, MakeHashStr)\r
1718 ModuleHashPair = path.join(CacheDestDir, self.Name + ".ModuleHashPair")\r
1719 MakeHashChain = path.join(CacheHashDestDir, self.Name + ".MakeHashChain")\r
1720 ModuleFilesChain = path.join(CacheHashDestDir, self.Name + ".ModuleFilesChain")\r
1721\r
1722 # save the HashChainDict as json file\r
1723 CreateDirectory (CacheDestDir)\r
1724 CreateDirectory (CacheHashDestDir)\r
1725 try:\r
1726 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
1727 if os.path.exists(ModuleHashPair):\r
94459080
SS
1728 with open(ModuleHashPair, 'r') as f:\r
1729 ModuleHashPairList = json.load(f)\r
0e7e7a26
SS
1730 PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
1731 MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
1732 ModuleHashPairList.append((PreMakeHash, MakeHash))\r
1733 ModuleHashPairList = list(set(map(tuple, ModuleHashPairList)))\r
1734 with open(ModuleHashPair, 'w') as f:\r
1735 json.dump(ModuleHashPairList, f, indent=2)\r
1736 except:\r
1737 EdkLogger.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair)\r
1738 return False\r
1739\r
1740 try:\r
1741 with open(MakeHashChain, 'w') as f:\r
1742 json.dump(gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain, f, indent=2)\r
1743 except:\r
1744 EdkLogger.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain)\r
1745 return False\r
1746\r
1747 try:\r
1748 with open(ModuleFilesChain, 'w') as f:\r
1749 json.dump(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain, f, indent=2)\r
1750 except:\r
1751 EdkLogger.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain)\r
1752 return False\r
1753\r
1754 # save the autogenfile and makefile for debug usage\r
1755 CacheDebugDir = path.join(CacheHashDestDir, "CacheDebug")\r
1756 CreateDirectory (CacheDebugDir)\r
1757 CopyFileOnChange(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath, CacheDebugDir)\r
1758 if gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
1759 for File in gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
1760 CopyFileOnChange(str(File), CacheDebugDir)\r
1761\r
1762 return True\r
e8449e1d
FB
1763\r
1764 ## Create makefile for the module and its dependent libraries\r
1765 #\r
1766 # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of\r
1767 # dependent libraries will be created\r
1768 #\r
1769 @cached_class_function\r
1770 def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r
0e7e7a26
SS
1771 gDict = GlobalData.gCacheIR\r
1772 if (self.MetaFile.Path, self.Arch) in gDict and \\r
1773 gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
1774 return\r
1775\r
e8449e1d
FB
1776 # nest this function inside it's only caller.\r
1777 def CreateTimeStamp():\r
1778 FileSet = {self.MetaFile.Path}\r
1779\r
1780 for SourceFile in self.Module.Sources:\r
1781 FileSet.add (SourceFile.Path)\r
1782\r
1783 for Lib in self.DependentLibraryList:\r
1784 FileSet.add (Lib.MetaFile.Path)\r
1785\r
1786 for f in self.AutoGenDepSet:\r
1787 FileSet.add (f.Path)\r
1788\r
1789 if os.path.exists (self.TimeStampPath):\r
1790 os.remove (self.TimeStampPath)\r
df43ea6c
FB
1791\r
1792 SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)\r
e8449e1d
FB
1793\r
1794 # Ignore generating makefile when it is a binary module\r
1795 if self.IsBinaryModule:\r
1796 return\r
1797\r
1798 self.GenFfsList = GenFfsList\r
1799\r
1800 if not self.IsLibrary and CreateLibraryMakeFile:\r
1801 for LibraryAutoGen in self.LibraryAutoGenList:\r
1802 LibraryAutoGen.CreateMakeFile()\r
673d09a2 1803\r
0e7e7a26
SS
1804 # CanSkip uses timestamps to determine build skipping\r
1805 if self.CanSkip():\r
e8449e1d
FB
1806 return\r
1807\r
1808 if len(self.CustomMakefile) == 0:\r
1809 Makefile = GenMake.ModuleMakefile(self)\r
1810 else:\r
1811 Makefile = GenMake.CustomMakefile(self)\r
1812 if Makefile.Generate():\r
1813 EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %\r
1814 (self.Name, self.Arch))\r
1815 else:\r
1816 EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %\r
1817 (self.Name, self.Arch))\r
1818\r
1819 CreateTimeStamp()\r
1820\r
0e7e7a26
SS
1821 MakefileType = Makefile._FileType\r
1822 MakefileName = Makefile._FILE_NAME_[MakefileType]\r
1823 MakefilePath = os.path.join(self.MakeFileDir, MakefileName)\r
1824\r
1825 MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
1826 MewIR.MakefilePath = MakefilePath\r
1827 MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
1828 MewIR.CreateMakeFileDone = True\r
94459080 1829 with GlobalData.cache_lock:\r
0e7e7a26
SS
1830 try:\r
1831 IR = gDict[(self.MetaFile.Path, self.Arch)]\r
1832 IR.MakefilePath = MakefilePath\r
1833 IR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
1834 IR.CreateMakeFileDone = True\r
1835 gDict[(self.MetaFile.Path, self.Arch)] = IR\r
1836 except:\r
1837 gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
1838\r
e8449e1d
FB
1839 def CopyBinaryFiles(self):\r
1840 for File in self.Module.Binaries:\r
1841 SrcPath = File.Path\r
1842 DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))\r
1843 CopyLongFilePath(SrcPath, DstPath)\r
    ## Create autogen code for the module and its dependent libraries
    #
    #   @param      CreateLibraryCodeFile   Flag indicating if or not the code of
    #                                       dependent libraries will be created
    #
    #   @retval     list    Names of the generated AutoGen files, or None when
    #                       generation is skipped
    def CreateCodeFile(self, CreateLibraryCodeFile=True):
        gDict = GlobalData.gCacheIR
        # Skip if another pass already generated code for this (module, arch).
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:
            return

        if self.IsCodeFileCreated:
            return

        # Need to generate PcdDatabase even PcdDriver is binarymodule
        if self.IsBinaryModule and self.PcdIsDriver != '':
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
            return
        if self.IsBinaryModule:
            # Binary library: just copy the prebuilt binaries into OutputDir.
            if self.IsLibrary:
                self.CopyBinaryFiles()
            return

        if not self.IsLibrary and CreateLibraryCodeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateCodeFile()

        # CanSkip uses timestamps to determine build skipping
        if self.CanSkip():
            return
        # Bare attribute access forces evaluation of the LibraryAutoGenList
        # cached property (and its _ApplyBuildRule side effects) even when the
        # loop above did not run.
        self.LibraryAutoGenList
        AutoGenList = []
        IgoredAutoGenList = []

        for File in self.AutoGenFileList:
            if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
                AutoGenList.append(str(File))
            else:
                IgoredAutoGenList.append(str(File))


        for ModuleType in self.DepexList:
            # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
            if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
                continue

            Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
            DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}

            # A non-empty postfix notation means a real depex section was built.
            if len(Dpx.PostfixNotation) != 0:
                self.DepexGenerated = True

            if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
                AutoGenList.append(str(DpxFile))
            else:
                IgoredAutoGenList.append(str(DpxFile))

        if IgoredAutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
                            (" ".join(AutoGenList), self.Name, self.Arch))
        elif AutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
                            (" ".join(IgoredAutoGenList), self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
                            (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))

        self.IsCodeFileCreated = True
        # Record completion in the shared cache IR under the lock; insert a
        # fresh entry when the key is not present yet.
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.CreateCodeFileDone = True
        with GlobalData.cache_lock:
            try:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.CreateCodeFileDone = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR

        return AutoGenList
1923\r
1924 ## Summarize the ModuleAutoGen objects of all libraries used by this module\r
1925 @cached_property\r
1926 def LibraryAutoGenList(self):\r
1927 RetVal = []\r
1928 for Library in self.DependentLibraryList:\r
1929 La = ModuleAutoGen(\r
1930 self.Workspace,\r
1931 Library.MetaFile,\r
1932 self.BuildTarget,\r
1933 self.ToolChain,\r
1934 self.Arch,\r
1935 self.PlatformInfo.MetaFile,\r
1936 self.DataPipe\r
1937 )\r
1938 La.IsLibrary = True\r
1939 if La not in RetVal:\r
1940 RetVal.append(La)\r
1941 for Lib in La.CodaTargetList:\r
1942 self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)\r
1943 return RetVal\r
1944\r
1945 def GenModuleHash(self):\r
1946 # Initialize a dictionary for each arch type\r
1947 if self.Arch not in GlobalData.gModuleHash:\r
1948 GlobalData.gModuleHash[self.Arch] = {}\r
1949\r
1950 # Early exit if module or library has been hashed and is in memory\r
1951 if self.Name in GlobalData.gModuleHash[self.Arch]:\r
1952 return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
1953\r
1954 # Initialze hash object\r
1955 m = hashlib.md5()\r
1956\r
1957 # Add Platform level hash\r
1958 m.update(GlobalData.gPlatformHash.encode('utf-8'))\r
1959\r
1960 # Add Package level hash\r
1961 if self.DependentPackageList:\r
1962 for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
1963 if Pkg.PackageName in GlobalData.gPackageHash:\r
1964 m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))\r
1965\r
1966 # Add Library hash\r
1967 if self.LibraryAutoGenList:\r
1968 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
1969 if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r
1970 Lib.GenModuleHash()\r
1971 m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
1972\r
1973 # Add Module self\r
94459080
SS
1974 with open(str(self.MetaFile), 'rb') as f:\r
1975 Content = f.read()\r
e8449e1d
FB
1976 m.update(Content)\r
1977\r
1978 # Add Module's source files\r
1979 if self.SourceFileList:\r
1980 for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r
1981 f = open(str(File), 'rb')\r
1982 Content = f.read()\r
1983 f.close()\r
1984 m.update(Content)\r
1985\r
1986 GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()\r
1987\r
1988 return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
1989\r
    ## Compute the hash of this module's own files (meta file, sources and
    #  their discovered include dependencies) and record digest plus per-file
    #  chain in the shared cache IR.
    #
    #   @param  gDict   Shared cache IR dictionary
    #   @retval The cache IR entry for (MetaFile.Path, Arch), or None when the
    #           entry is marked crashed
    def GenModuleFilesHash(self, gDict):
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict:
            if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:
                return gDict[(self.MetaFile.Path, self.Arch)]

        # skip if the module cache already crashed
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return

        DependencyFileSet = set()
        # Add Module Meta file
        DependencyFileSet.add(self.MetaFile)

        # Add Module's source files
        if self.SourceFileList:
            for File in set(self.SourceFileList):
                DependencyFileSet.add(File)

        # Add modules's include header files
        # Search dependency file list for each source file
        SourceFileList = []
        OutPutFileList = []
        for Target in self.IntroTargetList:
            SourceFileList.extend(Target.Inputs)
            OutPutFileList.extend(Target.Outputs)
        # Intermediate outputs that also appear as inputs are not real sources.
        if OutPutFileList:
            for Item in OutPutFileList:
                if Item in SourceFileList:
                    SourceFileList.remove(Item)
        SearchList = []
        for file_path in self.IncludePathList + self.BuildOptionIncPathList:
            # skip the folders in platform BuildDir which are not been generated yet
            if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):
                continue
            SearchList.append(file_path)
        FileDependencyDict = {}
        ForceIncludedFile = []
        for F in SourceFileList:
            # skip the files which are not been generated yet, because
            # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c
            if not os.path.exists(F.Path):
                continue
            FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)

        if FileDependencyDict:
            for Dependency in FileDependencyDict.values():
                DependencyFileSet.update(set(Dependency))

        # Caculate all above dependency files hash
        # Initialze hash object
        FileList = []
        m = hashlib.md5()
        # Sorted iteration keeps the cumulative digest deterministic.
        for File in sorted(DependencyFileSet, key=lambda x: str(x)):
            if not os.path.exists(str(File)):
                EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
                continue
            with open(str(File), 'rb') as f:
                Content = f.read()
            m.update(Content)
            # Keep a per-file digest so chains can be compared file by file.
            FileList.append((str(File), hashlib.md5(Content).hexdigest()))


        # Publish results into the shared cache IR under the lock; insert a
        # fresh entry when the key is not present yet.
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.ModuleFilesHashDigest = m.digest()
        MewIR.ModuleFilesHashHexDigest = m.hexdigest()
        MewIR.ModuleFilesChain = FileList
        with GlobalData.cache_lock:
            try:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.ModuleFilesHashDigest = m.digest()
                IR.ModuleFilesHashHexDigest = m.hexdigest()
                IR.ModuleFilesChain = FileList
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR

        return gDict[(self.MetaFile.Path, self.Arch)]
2069\r
    ## Compute the pre-makefile hash: platform hash + package hashes +
    #  library ModuleFilesHash digests + this module's ModuleFilesHash.
    #  The result is stored as PreMakefileHashHexDigest in the cache IR.
    #
    #   @param  gDict   Shared cache IR dictionary
    #   @retval The cache IR entry for (MetaFile.Path, Arch), or None when
    #           skipped (crashed cache, binary module, missing digest)
    def GenPreMakefileHash(self, gDict):
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            return gDict[(self.MetaFile.Path, self.Arch)]

        # skip if the module cache already crashed
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return

        # skip binary module
        if self.IsBinaryModule:
            return

        # Generate the module-files hash on demand if it is not there yet.
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            self.GenModuleFilesHash(gDict)

        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return

        # Initialze hash object
        m = hashlib.md5()

        # Add Platform level hash
        # NOTE: the parentheses are redundant — the key is just the plain
        # string 'PlatformHash', not a tuple.
        if ('PlatformHash') in gDict:
            m.update(gDict[('PlatformHash')].encode('utf-8'))
        else:
            EdkLogger.quiet("[cache warning]: PlatformHash is missing")

        # Add Package level hash (sorted so the digest is order-independent)
        if self.DependentPackageList:
            for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
                if (Pkg.PackageName, 'PackageHash') in gDict:
                    m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))
                else:
                    EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg.PackageName, self.MetaFile.Name, self.Arch))

        # Add Library hash, generating a library's hash on demand when absent
        if self.LibraryAutoGenList:
            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
                   not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:
                    Lib.GenPreMakefileHash(gDict)
                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)

        # Add Module self
        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)

        # Publish the digest into the shared cache IR under the lock.
        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.PreMakefileHashHexDigest = m.hexdigest()
            gDict[(self.MetaFile.Path, self.Arch)] = IR

        return gDict[(self.MetaFile.Path, self.Arch)]
2128\r
2129 def GenMakeHeaderFilesHash(self, gDict):\r
2130 # Early exit if module or library has been hashed and is in memory\r
2131 if (self.MetaFile.Path, self.Arch) in gDict and \\r
2132 gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
2133 return gDict[(self.MetaFile.Path, self.Arch)]\r
2134\r
94459080
SS
2135 # skip if the module cache already crashed\r
2136 if (self.MetaFile.Path, self.Arch) in gDict and \\r
2137 gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
2138 return\r
2139\r
0e7e7a26
SS
2140 # skip binary module\r
2141 if self.IsBinaryModule:\r
2142 return\r
2143\r
2144 if not (self.MetaFile.Path, self.Arch) in gDict or \\r
2145 not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:\r
2146 if self.IsLibrary:\r
2147 if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.libConstPcd:\r
2148 self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
2149 if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.Refes:\r
2150 self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
2151 self.CreateCodeFile()\r
2152 if not (self.MetaFile.Path, self.Arch) in gDict or \\r
2153 not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
e3c8311f 2154 self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.Path, self.Arch),[]))\r
0e7e7a26
SS
2155\r
2156 if not (self.MetaFile.Path, self.Arch) in gDict or \\r
2157 not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \\r
2158 not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
2159 EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
2160 return\r
2161\r
2162 DependencyFileSet = set()\r
2163 # Add Makefile\r
2164 if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:\r
2165 DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)\r
2166 else:\r
2167 EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
2168\r
2169 # Add header files\r
2170 if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
2171 for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
2172 DependencyFileSet.add(File)\r
2173 else:\r
2174 EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
2175\r
2176 # Add AutoGen files\r
2177 if self.AutoGenFileList:\r
2178 for File in set(self.AutoGenFileList):\r
2179 DependencyFileSet.add(File)\r
2180\r
2181 # Caculate all above dependency files hash\r
2182 # Initialze hash object\r
2183 FileList = []\r
2184 m = hashlib.md5()\r
2185 for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
2186 if not os.path.exists(str(File)):\r
2187 EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
2188 continue\r
2189 f = open(str(File), 'rb')\r
2190 Content = f.read()\r
2191 f.close()\r
2192 m.update(Content)\r
2193 FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
2194\r
94459080 2195 with GlobalData.cache_lock:\r
0e7e7a26
SS
2196 IR = gDict[(self.MetaFile.Path, self.Arch)]\r
2197 IR.AutoGenFileList = self.AutoGenFileList.keys()\r
2198 IR.MakeHeaderFilesHashChain = FileList\r
2199 IR.MakeHeaderFilesHashDigest = m.digest()\r
2200 gDict[(self.MetaFile.Path, self.Arch)] = IR\r
2201\r
2202 return gDict[(self.MetaFile.Path, self.Arch)]\r
2203\r
    ## Compute the overall make-phase hash: header-files hash + library
    #  MakeHash digests + this module's files hash, and accumulate the
    #  combined MakeHashChain list; both are stored in the cache IR.
    #
    #   @param  gDict   Shared cache IR dictionary
    #   @retval The cache IR entry for (MetaFile.Path, Arch), or None when
    #           skipped (crashed cache, binary module, missing prerequisites)
    def GenMakeHash(self, gDict):
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
            return gDict[(self.MetaFile.Path, self.Arch)]

        # skip if the module cache already crashed
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return

        # skip binary module
        if self.IsBinaryModule:
            return

        # Generate missing prerequisite hashes on demand.
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            self.GenModuleFilesHash(gDict)
        if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
            self.GenMakeHeaderFilesHash(gDict)

        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:
            EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return

        # Initialze hash object
        m = hashlib.md5()
        MakeHashChain = []

        # Add hash of makefile and dependency header files
        m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)
        # Append only chain entries not already collected, in sorted order.
        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))
        New.sort(key=lambda x: str(x))
        MakeHashChain += New

        # Add Library hash, generating a library's MakeHash on demand
        if self.LibraryAutoGenList:
            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
                   not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:
                    Lib.GenMakeHash(gDict)
                if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:
                    print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)
                    continue
                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)
                New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))
                New.sort(key=lambda x: str(x))
                MakeHashChain += New

        # Add Module self
        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)
        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))
        New.sort(key=lambda x: str(x))
        MakeHashChain += New

        # Publish digest and chain into the shared cache IR under the lock.
        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.MakeHashDigest = m.digest()
            IR.MakeHashHexDigest = m.hexdigest()
            IR.MakeHashChain = MakeHashChain
            gDict[(self.MetaFile.Path, self.Arch)] = IR

        return gDict[(self.MetaFile.Path, self.Arch)]
2271\r
    ## Decide whether we can skip the left autogen and make process
    #
    #  Looks up the binary cache source for an entry whose stored
    #  PreMakefileHash matches the current one; on a hit, restores all cached
    #  output (and FFS) files and marks PreMakeCacheHit in the cache IR.
    #
    #   @param  gDict   Shared cache IR dictionary
    #   @retval True    cache hit, files restored; False otherwise
    def CanSkipbyPreMakefileCache(self, gDict):
        if not GlobalData.gBinCacheSource:
            return False

        if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:
            return True

        if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return False

        # If Module is binary, do not skip by cache
        if self.IsBinaryModule:
            return False

        # .inc is contains binary information so do not skip by hash as well
        for f_ext in self.SourceFileList:
            if '.inc' in str(f_ext):
                return False

        # Get the module hash values from stored cache and currrent build
        # then check whether cache hit based on the hash values
        # if cache hit, restore all the files from cache
        FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
        FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)

        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
        ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
        if not os.path.exists(ModuleHashPair):
            EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
            # A missing pair file marks this module's cache entry as broken so
            # later checks bail out quickly.
            with GlobalData.cache_lock:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.CacheCrash = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            return False

        try:
            with open(ModuleHashPair, 'r') as f:
                ModuleHashPairList = json.load(f)
        except:
            EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
            return False

        self.GenPreMakefileHash(gDict)
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return False

        # Find the MakeHash paired with the current pre-makefile hash.
        MakeHashStr = None
        CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
        for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
            if PreMakefileHash == CurrentPreMakeHash:
                MakeHashStr = str(MakeHash)

        if not MakeHashStr:
            return False

        TargetHashDir = path.join(FileDir, MakeHashStr)
        TargetFfsHashDir = path.join(FfsDir, MakeHashStr)

        if not os.path.exists(TargetHashDir):
            EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
            return False

        # Restore every cached file into the module's output directories.
        for root, dir, files in os.walk(TargetHashDir):
            for f in files:
                File = path.join(root, f)
                self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
        if os.path.exists(TargetFfsHashDir):
            for root, dir, files in os.walk(TargetFfsHashDir):
                for f in files:
                    File = path.join(root, f)
                    self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)

        # The PCD driver modules also need their PCD database regenerated.
        if self.Name == "PcdPeim" or self.Name == "PcdDxe":
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())

        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.PreMakeCacheHit = True
            gDict[(self.MetaFile.Path, self.Arch)] = IR
        print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)
        #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))
        return True
2357\r
2358 ## Decide whether we can skip the make process\r
2359 def CanSkipbyMakeCache(self, gDict):\r
2360 if not GlobalData.gBinCacheSource:\r
2361 return False\r
2362\r
94459080
SS
2363 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
2364 return True\r
2365\r
2366 if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
2367 return False\r
2368\r
0e7e7a26
SS
2369 # If Module is binary, do not skip by cache\r
2370 if self.IsBinaryModule:\r
2371 print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)\r
2372 return False\r
2373\r
2374 # .inc is contains binary information so do not skip by hash as well\r
2375 for f_ext in self.SourceFileList:\r
2376 if '.inc' in str(f_ext):\r
94459080 2377 with GlobalData.cache_lock:\r
0e7e7a26
SS
2378 IR = gDict[(self.MetaFile.Path, self.Arch)]\r
2379 IR.MakeCacheHit = False\r
2380 gDict[(self.MetaFile.Path, self.Arch)] = IR\r
2381 print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)\r
2382 return False\r
2383\r
2384 # Get the module hash values from stored cache and currrent build\r
2385 # then check whether cache hit based on the hash values\r
2386 # if cache hit, restore all the files from cache\r
2387 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
2388 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
2389\r
2390 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
2391 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
2392 if not os.path.exists(ModuleHashPair):\r
2393 EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
94459080
SS
2394 with GlobalData.cache_lock:\r
2395 IR = gDict[(self.MetaFile.Path, self.Arch)]\r
2396 IR.CacheCrash = True\r
2397 gDict[(self.MetaFile.Path, self.Arch)] = IR\r
0e7e7a26
SS
2398 return False\r
2399\r
2400 try:\r
94459080
SS
2401 with open(ModuleHashPair, 'r') as f:\r
2402 ModuleHashPairList = json.load(f)\r
0e7e7a26
SS
2403 except:\r
2404 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
2405 return False\r
2406\r
2407 self.GenMakeHash(gDict)\r
2408 if not (self.MetaFile.Path, self.Arch) in gDict or \\r
2409 not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
2410 EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
2411 return False\r
2412\r
2413 MakeHashStr = None\r
2414 CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
2415 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
2416 if MakeHash == CurrentMakeHash:\r
2417 MakeHashStr = str(MakeHash)\r
2418\r
2419 if not MakeHashStr:\r
2420 print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)\r
2421 return False\r
2422\r
2423 TargetHashDir = path.join(FileDir, MakeHashStr)\r
2424 TargetFfsHashDir = path.join(FfsDir, MakeHashStr)\r
2425 if not os.path.exists(TargetHashDir):\r
2426 EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)\r
2427 return False\r
2428\r
2429 for root, dir, files in os.walk(TargetHashDir):\r
2430 for f in files:\r
2431 File = path.join(root, f)\r
2432 self.CacheCopyFile(self.OutputDir, TargetHashDir, File)\r
2433\r
2434 if os.path.exists(TargetFfsHashDir):\r
2435 for root, dir, files in os.walk(TargetFfsHashDir):\r
2436 for f in files:\r
2437 File = path.join(root, f)\r
2438 self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)\r
2439\r
2440 if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
2441 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
94459080 2442 with GlobalData.cache_lock:\r
0e7e7a26
SS
2443 IR = gDict[(self.MetaFile.Path, self.Arch)]\r
2444 IR.MakeCacheHit = True\r
2445 gDict[(self.MetaFile.Path, self.Arch)] = IR\r
2446 print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)\r
2447 return True\r
2448\r
56c786b0
SS
2449 ## Show the first file name which causes cache miss\r
2450 def PrintFirstMakeCacheMissFile(self, gDict):\r
2451 if not GlobalData.gBinCacheSource:\r
2452 return\r
2453\r
94459080
SS
2454 # skip if the module cache already crashed\r
2455 if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
2456 return\r
2457\r
56c786b0
SS
2458 # skip binary module\r
2459 if self.IsBinaryModule:\r
2460 return\r
2461\r
2462 if not (self.MetaFile.Path, self.Arch) in gDict:\r
2463 return\r
2464\r
2465 # Only print cache miss file for the MakeCache not hit module\r
2466 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
2467 return\r
2468\r
2469 if not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:\r
2470 EdkLogger.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
2471 return\r
2472\r
2473 # Find the cache dir name through the .ModuleHashPair file info\r
2474 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
2475\r
2476 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
2477 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
2478 if not os.path.exists(ModuleHashPair):\r
2479 EdkLogger.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
2480 return\r
2481\r
2482 try:\r
94459080
SS
2483 with open(ModuleHashPair, 'r') as f:\r
2484 ModuleHashPairList = json.load(f)\r
56c786b0
SS
2485 except:\r
2486 EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
2487 return\r
2488\r
2489 MakeHashSet = set()\r
2490 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
2491 TargetHashDir = path.join(FileDir, str(MakeHash))\r
2492 if os.path.exists(TargetHashDir):\r
2493 MakeHashSet.add(MakeHash)\r
2494 if not MakeHashSet:\r
2495 EdkLogger.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
2496 return\r
2497\r
2498 TargetHash = list(MakeHashSet)[0]\r
2499 TargetHashDir = path.join(FileDir, str(TargetHash))\r
2500 if len(MakeHashSet) > 1 :\r
2501 EdkLogger.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash, self.MetaFile.Path, self.Arch))\r
2502\r
2503 ListFile = path.join(TargetHashDir, self.Name + '.MakeHashChain')\r
2504 if os.path.exists(ListFile):\r
2505 try:\r
2506 f = open(ListFile, 'r')\r
2507 CachedList = json.load(f)\r
2508 f.close()\r
2509 except:\r
2510 EdkLogger.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile)\r
2511 return\r
2512 else:\r
2513 EdkLogger.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile)\r
2514 return\r
2515\r
2516 CurrentList = gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain\r
2517 for idx, (file, hash) in enumerate (CurrentList):\r
2518 (filecached, hashcached) = CachedList[idx]\r
2519 if file != filecached:\r
2520 EdkLogger.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self.MetaFile.Path, self.Arch, file, filecached))\r
2521 break\r
2522 if hash != hashcached:\r
2523 EdkLogger.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self.MetaFile.Path, self.Arch, file))\r
2524 break\r
2525\r
2526 return True\r
2527\r
e8449e1d 2528 ## Decide whether we can skip the ModuleAutoGen process\r
0e7e7a26 2529 def CanSkipbyCache(self, gDict):\r
e8449e1d 2530 # Hashing feature is off\r
0e7e7a26 2531 if not GlobalData.gBinCacheSource:\r
e8449e1d
FB
2532 return False\r
2533\r
0e7e7a26
SS
2534 if self in GlobalData.gBuildHashSkipTracking:\r
2535 return GlobalData.gBuildHashSkipTracking[self]\r
e8449e1d
FB
2536\r
2537 # If library or Module is binary do not skip by hash\r
2538 if self.IsBinaryModule:\r
0e7e7a26 2539 GlobalData.gBuildHashSkipTracking[self] = False\r
e8449e1d
FB
2540 return False\r
2541\r
2542 # .inc is contains binary information so do not skip by hash as well\r
2543 for f_ext in self.SourceFileList:\r
2544 if '.inc' in str(f_ext):\r
0e7e7a26 2545 GlobalData.gBuildHashSkipTracking[self] = False\r
e8449e1d
FB
2546 return False\r
2547\r
0e7e7a26
SS
2548 if not (self.MetaFile.Path, self.Arch) in gDict:\r
2549 return False\r
2550\r
2551 if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:\r
2552 GlobalData.gBuildHashSkipTracking[self] = True\r
e8449e1d
FB
2553 return True\r
2554\r
0e7e7a26
SS
2555 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
2556 GlobalData.gBuildHashSkipTracking[self] = True\r
2557 return True\r
e8449e1d 2558\r
0e7e7a26 2559 return False\r
e8449e1d
FB
2560\r
2561 ## Decide whether we can skip the ModuleAutoGen process\r
2562 # If any source file is newer than the module than we cannot skip\r
2563 #\r
2564 def CanSkip(self):\r
0e7e7a26
SS
2565 # Don't skip if cache feature enabled\r
2566 if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:\r
2567 return False\r
e8449e1d
FB
2568 if self.MakeFileDir in GlobalData.gSikpAutoGenCache:\r
2569 return True\r
2570 if not os.path.exists(self.TimeStampPath):\r
2571 return False\r
2572 #last creation time of the module\r
2573 DstTimeStamp = os.stat(self.TimeStampPath)[8]\r
2574\r
2575 SrcTimeStamp = self.Workspace._SrcTimeStamp\r
2576 if SrcTimeStamp > DstTimeStamp:\r
2577 return False\r
2578\r
2579 with open(self.TimeStampPath,'r') as f:\r
2580 for source in f:\r
2581 source = source.rstrip('\n')\r
2582 if not os.path.exists(source):\r
2583 return False\r
2584 if source not in ModuleAutoGen.TimeDict :\r
2585 ModuleAutoGen.TimeDict[source] = os.stat(source)[8]\r
2586 if ModuleAutoGen.TimeDict[source] > DstTimeStamp:\r
2587 return False\r
2588 GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)\r
2589 return True\r
2590\r
2591 @cached_property\r
2592 def TimeStampPath(self):\r
2593 return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')\r