]> git.proxmox.com Git - mirror_edk2.git/blame - BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
MdeModulePkg/SdMmcPciHcDxe: Fix SdMmcMmcLegacy bus timing handling
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / ModuleAutoGen.py
CommitLineData
e8449e1d
FB
1## @file\r
2# Create makefile for MS nmake and GNU make\r
3#\r
4# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>\r
5# SPDX-License-Identifier: BSD-2-Clause-Patent\r
6#\r
7from __future__ import absolute_import\r
8from AutoGen.AutoGen import AutoGen\r
9from Common.LongFilePathSupport import CopyLongFilePath\r
10from Common.BuildToolError import *\r
11from Common.DataType import *\r
12from Common.Misc import *\r
13from Common.StringUtils import NormPath,GetSplitList\r
14from collections import defaultdict\r
15from Workspace.WorkspaceCommon import OrderedListDict\r
16import os.path as path\r
17import copy\r
18import hashlib\r
19from . import InfSectionParser\r
20from . import GenC\r
21from . import GenMake\r
22from . import GenDepex\r
23from io import BytesIO\r
24from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile\r
25from Workspace.MetaFileCommentParser import UsageList\r
26from .GenPcdDb import CreatePcdDatabaseCode\r
27from Common.caching import cached_class_function\r
28from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r
0e7e7a26
SS
29from AutoGen.CacheIR import ModuleBuildCacheIR\r
30import json\r
94459080 31import tempfile\r
e8449e1d
FB
32\r
## Mapping from toolchain family to the make program flavor used for it.
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
#
# Regular expressions for finding Include Directories; the difference between
# MSFT and INTEL/GCC/RVCT is that the former uses /I and the latter use -I to
# specify include directories.
#
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

## Default file names for AutoGen output; "%(module_name)s" is filled in via
## %-formatting with the module's base name.
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
# INF specification version written into generated as-built INF files.
gInfSpecVersion = "0x00010017"
#
# Match "name = <identifier>" inside an efivarstore statement.
#
# NOTE: raw strings are used so "\s"/"\w" reach the regex engine verbatim
# instead of being treated as (invalid) string escape sequences, which raises
# DeprecationWarning (and SyntaxWarning on newer Python).
#
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
#
# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
#
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
61\r
#
# Template used to generate the "as built" INF that accompanies a module's
# binary output.  ${BEGIN}...${END} pairs delimit repeated regions expanded by
# TemplateString; the remaining ${...} placeholders are substituted per module.
#
gAsBuiltInfHeaderString = TemplateString("""${header_comments}

# DO NOT EDIT
# FILE auto-generated

[Defines]
 INF_VERSION = ${module_inf_version}
 BASE_NAME = ${module_name}
 FILE_GUID = ${module_guid}
 MODULE_TYPE = ${module_module_type}${BEGIN}
 VERSION_STRING = ${module_version_string}${END}${BEGIN}
 PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
 UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
 PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
 ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
 UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
 CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
 DESTRUCTOR = ${module_destructor}${END}${BEGIN}
 SHADOW = ${module_shadow}${END}${BEGIN}
 PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
 PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
 PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
 PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
 BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
 SPEC = ${module_spec}${END}${BEGIN}
 UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
 MODULE_UNI_FILE = ${module_uni_file}${END}

[Packages.${module_arch}]${BEGIN}
 ${package_item}${END}

[Binaries.${module_arch}]${BEGIN}
 ${binary_item}${END}

[PatchPcd.${module_arch}]${BEGIN}
 ${patchablepcd_item}
${END}

[Protocols.${module_arch}]${BEGIN}
 ${protocol_item}
${END}

[Ppis.${module_arch}]${BEGIN}
 ${ppi_item}
${END}

[Guids.${module_arch}]${BEGIN}
 ${guid_item}
${END}

[PcdEx.${module_arch}]${BEGIN}
 ${pcd_item}
${END}

[LibraryClasses.${module_arch}]
## @LIB_INSTANCES${BEGIN}
# ${libraryclasses_item}${END}

${depexsection_item}

${userextension_tianocore_item}

${tail_comments}

[BuildOptions.${module_arch}]
## @AsBuilt${BEGIN}
## ${flags_item}${END}
""")
#
# Extend the lists stored in CopyToDict with the lists stored in CopyFromDict.
# CopyToDict is expected to behave like defaultdict(list); otherwise a missing
# key raises KeyError.
#
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    # Iterate (key, list) pairs so each destination list grows in place.
    for DictKey, SourceList in CopyFromDict.items():
        CopyToDict[DictKey].extend(SourceList)
140\r
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    """Join *PathList* into one path, ensure the directory exists, return it."""
    JoinedPath = path.join(*PathList)
    CreateDirectory(JoinedPath)
    return JoinedPath
146\r
147#\r
148# Convert string to C format array\r
149#\r
150def _ConvertStringToByteArray(Value):\r
151 Value = Value.strip()\r
152 if not Value:\r
153 return None\r
154 if Value[0] == '{':\r
155 if not Value.endswith('}'):\r
156 return None\r
157 Value = Value.replace(' ', '').replace('{', '').replace('}', '')\r
158 ValFields = Value.split(',')\r
159 try:\r
160 for Index in range(len(ValFields)):\r
161 ValFields[Index] = str(int(ValFields[Index], 0))\r
162 except ValueError:\r
163 return None\r
164 Value = '{' + ','.join(ValFields) + '}'\r
165 return Value\r
166\r
167 Unicode = False\r
168 if Value.startswith('L"'):\r
169 if not Value.endswith('"'):\r
170 return None\r
171 Value = Value[1:]\r
172 Unicode = True\r
173 elif not Value.startswith('"') or not Value.endswith('"'):\r
174 return None\r
175\r
176 Value = eval(Value) # translate escape character\r
177 NewValue = '{'\r
178 for Index in range(0, len(Value)):\r
179 if Unicode:\r
180 NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','\r
181 else:\r
182 NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','\r
183 Value = NewValue + '0}'\r
184 return Value\r
185\r
186## ModuleAutoGen class\r
187#\r
188# This class encapsules the AutoGen behaviors for the build tools. In addition to\r
189# the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according\r
190# to the [depex] section in module's inf file.\r
191#\r
192class ModuleAutoGen(AutoGen):\r
193 # call super().__init__ then call the worker function with different parameter count\r
194 def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
195 if not hasattr(self, "_Init"):\r
196 self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)\r
197 self._Init = True\r
198\r
    ## Cache the timestamps of metafiles of every module in a class attribute
    #
    # Class attribute: shared by all ModuleAutoGen instances.
    TimeDict = {}

    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # Check if this module is employed by the active platform.
        # args[0] is the platform meta-file and args[-1] is the DataPipe
        # (matching the _InitWorker positional signature).
        if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):
            EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
                              % (MetaFile, Arch))
            # Returning None means no object is created for unused modules.
            return None
        return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
210\r
    ## Initialize ModuleAutoGen
    #
    #   @param      Workspace           EdkIIWorkspaceBuild object
    #   @param      ModuleFile          The path of module file
    #   @param      Target              Build target (DEBUG, RELEASE)
    #   @param      Toolchain           Name of tool chain
    #   @param      Arch                The arch the module supports
    #   @param      PlatformFile        Platform meta-file
    #   @param      DataPipe            Data pipe carrying the platform info ("P_Info")
    #
    def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile, DataPipe):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)

        self.Workspace = Workspace
        self.WorkspaceDir = ""
        self.PlatformInfo = None
        self.DataPipe = DataPipe
        # Fills in WorkspaceDir and PlatformInfo from the data pipe.
        self.__init_platform_info__()
        self.MetaFile = ModuleFile
        self.SourceDir = self.MetaFile.SubDir
        # Keep SourceDir relative to the (possibly multi-root) workspace.
        self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)

        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily

        # One-shot flags so code/INF/depex generation happens only once per module.
        self.IsCodeFileCreated = False
        self.IsAsBuiltInfCreated = False
        self.DepexGenerated = False

        self.BuildDatabase = self.Workspace.BuildDatabase
        self.BuildRuleOrder = None
        self.BuildTime = 0

        # Per-item usage comments collected for the as-built INF.
        self._GuidComments = OrderedListDict()
        self._ProtocolComments = OrderedListDict()
        self._PpiComments = OrderedListDict()
        # Lazily populated by _ApplyBuildRule() / Targets.
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None

        self.AutoGenDepSet = set()
        self.ReferenceModules = []
        self.ConstPcd = {}

        # Build-cache bookkeeping (see AutoGen.CacheIR import above).
        self.Makefile = None
        self.FileDependCache = {}
261 def __init_platform_info__(self):\r
262 pinfo = self.DataPipe.Get("P_Info")\r
e8449e1d
FB
263 self.WorkspaceDir = pinfo.get("WorkspaceDir")\r
264 self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)\r
    ## hash() operator of ModuleAutoGen
    #
    #  The module file path and arch string will be used to represent
    #  hash value of this object
    #
    #   @retval   int Hash value of the module file path and arch
    #
    @cached_class_function
    def __hash__(self):
        return hash((self.MetaFile, self.Arch))

    ## Representation used in log/debug output: "<metafile> [<arch>]".
    def __repr__(self):
        return "%s [%s]" % (self.MetaFile, self.Arch)
277\r
278 # Get FixedAtBuild Pcds of this Module\r
279 @cached_property\r
280 def FixedAtBuildPcds(self):\r
281 RetVal = []\r
282 for Pcd in self.ModulePcdList:\r
283 if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:\r
284 continue\r
285 if Pcd not in RetVal:\r
286 RetVal.append(Pcd)\r
287 return RetVal\r
288\r
289 @cached_property\r
290 def FixedVoidTypePcds(self):\r
291 RetVal = {}\r
292 for Pcd in self.FixedAtBuildPcds:\r
293 if Pcd.DatumType == TAB_VOID:\r
294 if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:\r
295 RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue\r
296 return RetVal\r
297\r
298 @property\r
299 def UniqueBaseName(self):\r
300 ModuleNames = self.DataPipe.Get("M_Name")\r
301 if not ModuleNames:\r
302 return self.Name\r
76e12fa3 303 return ModuleNames.get((self.Name,self.MetaFile),self.Name)\r
e8449e1d
FB
304\r
    # Macros could be used in build_rule.txt (also Makefile)
    #
    # Keys are the macro names build_rule.txt / generated Makefiles may
    # reference; an OrderedDict keeps expansion order deterministic.
    @cached_property
    def Macros(self):
        return OrderedDict((
            ("WORKSPACE" ,self.WorkspaceDir),
            ("MODULE_NAME" ,self.Name),
            ("MODULE_NAME_GUID" ,self.UniqueBaseName),
            ("MODULE_GUID" ,self.Guid),
            ("MODULE_VERSION" ,self.Version),
            ("MODULE_TYPE" ,self.ModuleType),
            ("MODULE_FILE" ,str(self.MetaFile)),
            ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),
            ("MODULE_RELATIVE_DIR" ,self.SourceDir),
            ("MODULE_DIR" ,self.SourceDir),
            ("BASE_NAME" ,self.Name),
            ("ARCH" ,self.Arch),
            # TOOLCHAIN/TOOLCHAIN_TAG/TOOL_CHAIN_TAG are aliases of the same value.
            ("TOOLCHAIN" ,self.ToolChain),
            ("TOOLCHAIN_TAG" ,self.ToolChain),
            ("TOOL_CHAIN_TAG" ,self.ToolChain),
            ("TARGET" ,self.BuildTarget),
            ("BUILD_DIR" ,self.PlatformInfo.BuildDir),
            ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("MODULE_BUILD_DIR" ,self.BuildDir),
            ("OUTPUT_DIR" ,self.OutputDir),
            ("DEBUG_DIR" ,self.DebugDir),
            ("DEST_DIR_OUTPUT" ,self.OutputDir),
            ("DEST_DIR_DEBUG" ,self.DebugDir),
            ("PLATFORM_NAME" ,self.PlatformInfo.Name),
            ("PLATFORM_GUID" ,self.PlatformInfo.Guid),
            ("PLATFORM_VERSION" ,self.PlatformInfo.Version),
            ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),
            ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
            ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),
            ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)
            ))
341\r
    ## Return the module build data object
    @cached_property
    def Module(self):
        # Keyed lookup into the workspace-wide build database.
        return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]

    ## Return the module name
    @cached_property
    def Name(self):
        return self.Module.BaseName

    ## Return the module DxsFile if exist
    @cached_property
    def DxsFile(self):
        return self.Module.DxsFile

    ## Return the module meta-file GUID
    @cached_property
    def Guid(self):
        #
        # To build same module more than once, the module path with FILE_GUID overridden has
        # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
        # in DSC. The overridden GUID can be retrieved from file name
        #
        if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
            #
            # Length of GUID is 36
            #
            return os.path.basename(self.MetaFile.Path)[:36]
        return self.Module.Guid

    ## Return the module version
    @cached_property
    def Version(self):
        return self.Module.Version

    ## Return the module type
    @cached_property
    def ModuleType(self):
        return self.Module.ModuleType

    ## Return the component type (for Edk.x style of module)
    @cached_property
    def ComponentType(self):
        return self.Module.ComponentType

    ## Return the build type
    @cached_property
    def BuildType(self):
        return self.Module.BuildType

    ## Return the PCD_IS_DRIVER setting
    @cached_property
    def PcdIsDriver(self):
        return self.Module.PcdIsDriver

    ## Return the autogen version, i.e. module meta-file version
    @cached_property
    def AutoGenVersion(self):
        return self.Module.AutoGenVersion

    ## Check if the module is library or not
    @cached_property
    def IsLibrary(self):
        # A module is a library iff its INF declares a LIBRARY_CLASS.
        return bool(self.Module.LibraryClass)

    ## Check if the module is binary module or not
    @cached_property
    def IsBinaryModule(self):
        return self.Module.IsBinaryModule
411\r
    ## Return the directory to store intermediate files of the module
    @cached_property
    def BuildDir(self):
        # <platform build dir>/<arch>/<module source dir>/<inf base name>;
        # _MakeDir also creates the directory on first access.
        return _MakeDir((
            self.PlatformInfo.BuildDir,
            self.Arch,
            self.SourceDir,
            self.MetaFile.BaseName
            ))

    ## Return the directory to store the intermediate object files of the module
    @cached_property
    def OutputDir(self):
        return _MakeDir((self.BuildDir, "OUTPUT"))

    ## Return the directory path to store ffs file
    @cached_property
    def FfsOutputDir(self):
        # Only meaningful when an FDF file is part of the build.
        if GlobalData.gFdfParser:
            return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
        return ''

    ## Return the directory to store auto-gened source files of the module
    @cached_property
    def DebugDir(self):
        return _MakeDir((self.BuildDir, "DEBUG"))
438\r
439 ## Return the path of custom file\r
440 @cached_property\r
441 def CustomMakefile(self):\r
442 RetVal = {}\r
443 for Type in self.Module.CustomMakefile:\r
444 MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'\r
445 File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r
446 RetVal[MakeType] = File\r
447 return RetVal\r
448\r
    ## Return the directory of the makefile
    #
    #   @retval     string  The directory string of module's makefile
    #
    @cached_property
    def MakeFileDir(self):
        # The makefile lives in the module's own build directory.
        return self.BuildDir

    ## Return build command string
    #
    #   @retval     string  Build command string
    #
    @cached_property
    def BuildCommand(self):
        # Delegated to the platform: all modules of a platform share one make command.
        return self.PlatformInfo.BuildCommand
464\r
465 ## Get object list of all packages the module and its dependent libraries belong to\r
466 #\r
467 # @retval list The list of package object\r
468 #\r
469 @cached_property\r
470 def DerivedPackageList(self):\r
471 PackageList = []\r
472 for M in [self.Module] + self.DependentLibraryList:\r
473 for Package in M.Packages:\r
474 if Package in PackageList:\r
475 continue\r
476 PackageList.append(Package)\r
477 return PackageList\r
478\r
    ## Get the depex string
    #
    # @return : a string containing all depex expressions, formatted as INF
    #           [Depex...] section text for the as-built INF.
    def _GetDepexExpresionString(self):
        DepexStr = ''
        DepexList = []
        ## DPX_SOURCE in Define section: depex comes from a file, emit nothing here.
        if self.Module.DxsFile:
            return DepexStr
        # Collect matching depex sections from the module and all dependent libraries.
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            DepexExpressionList = InfObj.GetDepexExpresionList()
            for DepexExpression in DepexExpressionList:
                for key in DepexExpression:
                    Arch, ModuleType = key
                    # Drop comment lines from the expression body.
                    DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
                    # the type of build module is USER_DEFINED.
                    # All different DEPEX section tags would be copied into the As Built INF file
                    # and there would be separate DEPEX section tags
                    if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
                        if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
                    else:
                        if Arch.upper() == TAB_ARCH_COMMON or \
                           (Arch.upper() == self.Arch.upper() and \
                           ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
                            DepexList.append({(Arch, ModuleType): DepexExpr})

        # USER_DEFINED/HOST_APPLICATION: emit each section verbatim, commented out.
        if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
            for Depex in DepexList:
                for key in Depex:
                    DepexStr += '[Depex.%s.%s]\n' % key
                    DepexStr += '\n'.join('# '+ val for val in Depex[key])
                    DepexStr += '\n\n'
            if not DepexStr:
                # Empty section header keeps the as-built INF well-formed.
                return '[Depex.%s]\n' % self.Arch
            return DepexStr

        # Other module types: AND all expressions together into one section.
        Count = 0
        for Depex in DepexList:
            Count += 1
            if DepexStr != '':
                DepexStr += ' AND '
            DepexStr += '('
            for D in Depex.values():
                DepexStr += ' '.join(val for val in D)
            # Strip a trailing 'END' token before closing the parenthesis.
            Index = DepexStr.find('END')
            if Index > -1 and Index == len(DepexStr) - 3:
                DepexStr = DepexStr[:-3]
            DepexStr = DepexStr.strip()
            DepexStr += ')'
        if Count == 1:
            # A single expression needs no enclosing parentheses.
            DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
        if not DepexStr:
            return '[Depex.%s]\n' % self.Arch
        return '[Depex.%s]\n# ' % self.Arch + DepexStr
538\r
    ## Merge dependency expression
    #
    #   @retval     dict    ModuleType -> token list of the merged dependency expression
    #
    @cached_property
    def DepexList(self):
        # No generated depex for DXS-driven modules, libraries, or modules that
        # provide an explicit dependency-expression source file.
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexList = []
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
        #
        FixedVoidTypePcds = {}
        for M in [self] + self.LibraryAutoGenList:
            FixedVoidTypePcds.update(M.FixedVoidTypePcds)
        for M in [self] + self.LibraryAutoGenList:
            Inherited = False
            for D in M.Module.Depex[self.Arch, self.ModuleType]:
                if DepexList != []:
                    DepexList.append('AND')
                DepexList.append('(')
                #replace D with value if D is FixedAtBuild PCD
                NewList = []
                for item in D:
                    if '.' not in item:
                        NewList.append(item)
                    else:
                        # A dotted token is a PCD reference; it must resolve to a
                        # 16-byte VOID* FixedAtBuild value (a GUID in byte form).
                        try:
                            Value = FixedVoidTypePcds[item]
                            if len(Value.split(',')) != 16:
                                EdkLogger.error("build", FORMAT_INVALID,
                                                "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
                            NewList.append(Value)
                        except:
                            EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))

                DepexList.extend(NewList)
                if DepexList[-1] == 'END':  # no need of a END at this time
                    DepexList.pop()
                DepexList.append(')')
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
            # BEFORE/AFTER expressions are standalone; stop merging once seen.
            if 'BEFORE' in DepexList or 'AFTER' in DepexList:
                break
        if len(DepexList) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexList}
588\r
    ## Merge dependency expression (INF text form)
    #
    #   @retval     dict    ModuleType -> merged dependency expression string
    #
    @cached_property
    def DepexExpressionDict(self):
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexExpressionString = ''
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.DepexExpression[self.Arch, self.ModuleType]:
                if DepexExpressionString != '':
                    DepexExpressionString += ' AND '
                DepexExpressionString += '('
                DepexExpressionString += D
                # Drop the trailing END keyword; one END closes the whole expression.
                # NOTE(review): rstrip('END') strips the *characters* E/N/D, not
                # the literal token, so other trailing E/N/D characters would be
                # stripped too -- confirm inputs always end in 'END' or none.
                DepexExpressionString = DepexExpressionString.rstrip('END').strip()
                DepexExpressionString += ')'
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
            # BEFORE/AFTER expressions are standalone; stop merging once seen.
            if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
                break
        if len(DepexExpressionString) > 0:
            EdkLogger.verbose('')

        return {self.ModuleType:DepexExpressionString}
620\r
    # Get the TianoCore user extension sections of the module and its dependent
    # libraries.
    # @retval: a list of section contents, each a list of lines starting with
    #          the bracketed section header.
    #
    def _GetTianoCoreUserExtensionList(self):
        TianoCoreUserExtentionList = []
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
            for TianoCoreUserExtent in TianoCoreUserExtenList:
                for Section in TianoCoreUserExtent:
                    # A 4-part section name carries an arch as its last element.
                    ItemList = Section.split(TAB_SPLIT)
                    Arch = self.Arch
                    if len(ItemList) == 4:
                        Arch = ItemList[3]
                    # Keep only sections matching this build's arch (or common).
                    if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
                        TianoCoreList = []
                        TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
                        TianoCoreList.extend(TianoCoreUserExtent[Section][:])
                        TianoCoreList.append('\n')
                        TianoCoreUserExtentionList.append(TianoCoreList)

        return TianoCoreUserExtentionList
644\r
    ## Return the list of specification version required for the module
    #
    #   @retval     list    The list of specification defined in module file
    #
    @cached_property
    def Specification(self):
        return self.Module.Specification

    ## Tool option for the module build
    #
    #   @param      PlatformInfo    The object of PlatformBuildInfo
    #   @retval     dict            The dict containing valid options
    #
    @cached_property
    def BuildOption(self):
        # Side effect: also computes self.BuildRuleOrder, a list of '.ext'
        # strings used to choose among same-named sources with different
        # extensions (see _MatchBuildRuleOrder).
        RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
        if self.BuildRuleOrder:
            self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
        return RetVal
664\r
665 ## Get include path list from tool option for the module build\r
666 #\r
667 # @retval list The include path list\r
668 #\r
669 @cached_property\r
670 def BuildOptionIncPathList(self):\r
671 #\r
672 # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT\r
673 # is the former use /I , the Latter used -I to specify include directories\r
674 #\r
675 if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):\r
676 BuildOptIncludeRegEx = gBuildOptIncludePatternMsft\r
677 elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):\r
678 BuildOptIncludeRegEx = gBuildOptIncludePatternOther\r
679 else:\r
680 #\r
681 # New ToolChainFamily, don't known whether there is option to specify include directories\r
682 #\r
683 return []\r
684\r
685 RetVal = []\r
686 for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):\r
687 try:\r
688 FlagOption = self.BuildOption[Tool]['FLAGS']\r
689 except KeyError:\r
690 FlagOption = ''\r
691\r
692 if self.ToolChainFamily != 'RVCT':\r
693 IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]\r
694 else:\r
695 #\r
696 # RVCT may specify a list of directory seperated by commas\r
697 #\r
698 IncPathList = []\r
699 for Path in BuildOptIncludeRegEx.findall(FlagOption):\r
700 PathList = GetSplitList(Path, TAB_COMMA_SPLIT)\r
701 IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)\r
702\r
703 #\r
704 # EDK II modules must not reference header files outside of the packages they depend on or\r
705 # within the module's directory tree. Report error if violation.\r
706 #\r
707 if GlobalData.gDisableIncludePathCheck == False:\r
708 for Path in IncPathList:\r
709 if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):\r
710 ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)\r
711 EdkLogger.error("build",\r
712 PARAMETER_INVALID,\r
713 ExtraData=ErrMsg,\r
714 File=str(self.MetaFile))\r
715 RetVal += IncPathList\r
716 return RetVal\r
717\r
    ## Return a list of files which can be built from source
    #
    #  What kind of files can be built is determined by build rules in
    #  $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
    #
    @cached_property
    def SourceFileList(self):
        RetVal = []
        # TAB_STAR ('*') and '' act as wildcards for toolchain tag/family.
        ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
        ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
        for F in self.Module.Sources:
            # match tool chain
            if F.TagName not in ToolChainTagSet:
                EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
                continue
            # match tool chain family or build rule family
            if F.ToolChainFamily not in ToolChainFamilySet:
                EdkLogger.debug(
                            EdkLogger.DEBUG_0,
                            "The file [%s] must be built by tools of [%s], " \
                            "but current toolchain family is [%s], buildrule family is [%s]" \
                                % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
                continue

            # add the file path into search path list for file including
            if F.Dir not in self.IncludePathList:
                self.IncludePathList.insert(0, F.Dir)
            RetVal.append(F)

        # Prune files shadowed by a higher-priority extension (BuildRuleOrder).
        self._MatchBuildRuleOrder(RetVal)

        # Applying the build rule records build targets as a side effect.
        for F in RetVal:
            self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return RetVal
753\r
    def _MatchBuildRuleOrder(self, FileList):
        """Remove files whose base name also exists under a higher-priority
        extension according to self.BuildRuleOrder; returns the pruned FileList
        (also modified in place).
        """
        Order_Dict = {}
        # Property access for its side effect: populates self.BuildRuleOrder.
        self.BuildOption
        for SingleFile in FileList:
            if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
                # Group candidate extensions by path-without-extension.
                key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]
                if key in Order_Dict:
                    Order_Dict[key].append(SingleFile.Ext)
                else:
                    Order_Dict[key] = [SingleFile.Ext]

        RemoveList = []
        for F in Order_Dict:
            if len(Order_Dict[F]) > 1:
                # Keep only the highest-priority extension (lowest order index).
                Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
                for Ext in Order_Dict[F][1:]:
                    RemoveList.append(F + Ext)

        # NOTE(review): RemoveList holds path *strings* while FileList holds
        # file objects; removal relies on the file object's equality comparing
        # against path strings -- confirm against the PathClass implementation.
        for item in RemoveList:
            FileList.remove(item)

        return FileList
776\r
    ## Return the list of unicode (.uni string definition) files
    @cached_property
    def UnicodeFileList(self):
        return self.FileTypes.get(TAB_UNICODE_FILE,[])

    ## Return the list of vfr files
    @cached_property
    def VfrFileList(self):
        return self.FileTypes.get(TAB_VFR_FILE, [])

    ## Return the list of Image Definition files
    @cached_property
    def IdfFileList(self):
        return self.FileTypes.get(TAB_IMAGE_FILE,[])
791\r
    ## Return a list of files which can be built from binary
    #
    #  "Build" binary files are just to copy them to build directory.
    #
    #   @retval     list            The list of files which can be built later
    #
    @cached_property
    def BinaryFileList(self):
        RetVal = []
        for F in self.Module.Binaries:
            # Filter by build target; TAB_ARCH_COMMON / TAB_STAR match any target.
            if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:
                continue
            # Append before applying the rule: _ApplyBuildRule consults the
            # growing RetVal (passed as BinaryFileList) while processing F.
            RetVal.append(F)
            self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
        return RetVal
807\r
    ## Build-rule lookup table for this module.
    #
    #  Maps both file types and source-file extensions to an instantiated
    #  build-rule object.  Rules are resolved by BuildRuleFamily first and
    #  ToolChainFamily second, trying BuildType before ModuleType each time.
    @cached_property
    def BuildRules(self):
        RetVal = {}
        BuildRuleDatabase = self.PlatformInfo.BuildRule
        for Type in BuildRuleDatabase.FileTypeList:
            #first try getting build rule by BuildRuleFamily
            RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
            if not RuleObject:
                # build type is always module type, but ...
                if self.ModuleType != self.BuildType:
                    RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
            #second try getting build rule by ToolChainFamily
            if not RuleObject:
                RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
                if not RuleObject:
                    # build type is always module type, but ...
                    if self.ModuleType != self.BuildType:
                        RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
            if not RuleObject:
                # No rule found for this file type: it simply cannot be built.
                continue
            # Expand build macros (dirs, names, ...) into the rule commands.
            RuleObject = RuleObject.Instantiate(self.Macros)
            RetVal[Type] = RuleObject
            for Ext in RuleObject.SourceFileExtList:
                RetVal[Ext] = RuleObject
        return RetVal
833\r
    def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
        """Walk File through the chain of build rules, recording every
        intermediate and final target in self._BuildTargets/_FileTypes.

        File            the source (or binary) file to process
        FileType        starting file type (TAB_UNKNOWN_FILE => detect by extension)
        BinaryFileList  overrides self.BinaryFileList while that property is
                        itself still being built (see BinaryFileList)
        """
        # Lazily create the target bookkeeping containers.
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        if not BinaryFileList:
            BinaryFileList = self.BinaryFileList

        SubDirectory = os.path.join(self.OutputDir, File.SubDir)
        if not os.path.exists(SubDirectory):
            CreateDirectory(SubDirectory)
        # LastTarget: most recent target produced; becomes a final target when
        # no further rule can consume its outputs.
        LastTarget = None
        RuleChain = set()
        SourceList = [File]
        Index = 0
        #
        # Make sure to get build rule order value
        #
        self.BuildOption

        # Worklist loop: SourceList grows as each rule's outputs are appended.
        while Index < len(SourceList):
            Source = SourceList[Index]
            Index = Index + 1

            if Source != File:
                CreateDirectory(Source.Dir)

            if File.IsBinary and File == Source and File in BinaryFileList:
                # Skip all files that are not binary libraries
                if not self.IsLibrary:
                    continue
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            elif FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            else:
                # stop at no more rules
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            FileType = RuleObject.SourceFileType
            self._FileTypes[FileType].add(Source)

            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            Target = RuleObject.Apply(Source, self.BuildRuleOrder)
            if not Target:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break
            elif not Target.Outputs:
                # Only do build for target with outputs
                self._FinalBuildTargetList.add(Target)

            self._BuildTargets[FileType].add(Target)

            if not Source.IsBinary and Source == File:
                # Targets generated directly from the module's own file.
                self._IntroBuildTargetList.add(Target)

            # to avoid cyclic rule
            if FileType in RuleChain:
                break

            RuleChain.add(FileType)
            SourceList.extend(Target.Outputs)
            LastTarget = Target
            # Reset so the next iteration detects the type from the file itself.
            FileType = TAB_UNKNOWN_FILE
909\r
910 @cached_property\r
911 def Targets(self):\r
912 if self._BuildTargets is None:\r
913 self._IntroBuildTargetList = set()\r
914 self._FinalBuildTargetList = set()\r
915 self._BuildTargets = defaultdict(set)\r
916 self._FileTypes = defaultdict(set)\r
917\r
918 #TRICK: call SourceFileList property to apply build rule for source files\r
919 self.SourceFileList\r
920\r
921 #TRICK: call _GetBinaryFileList to apply build rule for binary files\r
922 self.BinaryFileList\r
923\r
924 return self._BuildTargets\r
925\r
926 @cached_property\r
927 def IntroTargetList(self):\r
928 self.Targets\r
929 return self._IntroBuildTargetList\r
930\r
931 @cached_property\r
932 def CodaTargetList(self):\r
933 self.Targets\r
934 return self._FinalBuildTargetList\r
935\r
936 @cached_property\r
937 def FileTypes(self):\r
938 self.Targets\r
939 return self._FileTypes\r
940\r
941 ## Get the list of package object the module depends on\r
942 #\r
943 # @retval list The package object list\r
944 #\r
945 @cached_property\r
946 def DependentPackageList(self):\r
947 return self.Module.Packages\r
948\r
    ## Return the list of auto-generated code file
    #
    # @retval list The list of auto-generated file
    #
    @cached_property
    def AutoGenFileList(self):
        # Mapping of generated PathClass -> file content (str or bytes).
        # UNI/IDF generation is suppressed for the UEFI_HII build type.
        AutoGenUniIdf = self.BuildType != 'UEFI_HII'
        UniStringBinBuffer = BytesIO()
        IdfGenBinBuffer = BytesIO()
        RetVal = {}
        AutoGenC = TemplateString()
        AutoGenH = TemplateString()
        StringH = TemplateString()
        StringIdf = TemplateString()
        # GenC.CreateCode fills all of the buffers above as a side effect
        GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
        #
        # AutoGen.c is generated if there are library classes in inf, or there are object files
        #
        if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
                                    or TAB_OBJECT_FILE in self.FileTypes):
            AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenC)
            # Register each generated file with the build-rule machinery
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(AutoGenH) != "":
            AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(StringH) != "":
            AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
            # Binary string-pack output goes to OutputDir, not DebugDir
            AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = UniStringBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if UniStringBinBuffer is not None:
            UniStringBinBuffer.close()
        if str(StringIdf) != "":
            AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringIdf)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if IdfGenBinBuffer is not None:
            IdfGenBinBuffer.close()
        return RetVal
999\r
1000 ## Return the list of library modules explicitly or implicitly used by this module\r
1001 @cached_property\r
1002 def DependentLibraryList(self):\r
1003 # only merge library classes and PCD for non-library module\r
1004 if self.IsLibrary:\r
1005 return []\r
1006 return self.PlatformInfo.ApplyLibraryInstance(self.Module)\r
1007\r
1008 ## Get the list of PCDs from current module\r
1009 #\r
1010 # @retval list The list of PCD\r
1011 #\r
1012 @cached_property\r
1013 def ModulePcdList(self):\r
1014 # apply PCD settings from platform\r
1015 RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)\r
1016\r
1017 return RetVal\r
1018 @cached_property\r
1019 def _PcdComments(self):\r
1020 ReVal = OrderedListDict()\r
1021 ExtendCopyDictionaryLists(ReVal, self.Module.PcdComments)\r
1022 if not self.IsLibrary:\r
1023 for Library in self.DependentLibraryList:\r
1024 ExtendCopyDictionaryLists(ReVal, Library.PcdComments)\r
1025 return ReVal\r
1026\r
1027 ## Get the list of PCDs from dependent libraries\r
1028 #\r
1029 # @retval list The list of PCD\r
1030 #\r
1031 @cached_property\r
1032 def LibraryPcdList(self):\r
1033 if self.IsLibrary:\r
1034 return []\r
1035 RetVal = []\r
1036 Pcds = set()\r
1037 # get PCDs from dependent libraries\r
1038 for Library in self.DependentLibraryList:\r
1039 PcdsInLibrary = OrderedDict()\r
1040 for Key in Library.Pcds:\r
1041 # skip duplicated PCDs\r
1042 if Key in self.Module.Pcds or Key in Pcds:\r
1043 continue\r
1044 Pcds.add(Key)\r
1045 PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])\r
1046 RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))\r
1047 return RetVal\r
1048\r
1049 ## Get the GUID value mapping\r
1050 #\r
1051 # @retval dict The mapping between GUID cname and its value\r
1052 #\r
1053 @cached_property\r
1054 def GuidList(self):\r
1055 RetVal = self.Module.Guids\r
1056 for Library in self.DependentLibraryList:\r
1057 RetVal.update(Library.Guids)\r
1058 ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)\r
1059 ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)\r
1060 return RetVal\r
1061\r
1062 @cached_property\r
1063 def GetGuidsUsedByPcd(self):\r
1064 RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())\r
1065 for Library in self.DependentLibraryList:\r
1066 RetVal.update(Library.GetGuidsUsedByPcd())\r
1067 return RetVal\r
1068 ## Get the protocol value mapping\r
1069 #\r
1070 # @retval dict The mapping between protocol cname and its value\r
1071 #\r
1072 @cached_property\r
1073 def ProtocolList(self):\r
1074 RetVal = OrderedDict(self.Module.Protocols)\r
1075 for Library in self.DependentLibraryList:\r
1076 RetVal.update(Library.Protocols)\r
1077 ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)\r
1078 ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)\r
1079 return RetVal\r
1080\r
1081 ## Get the PPI value mapping\r
1082 #\r
1083 # @retval dict The mapping between PPI cname and its value\r
1084 #\r
1085 @cached_property\r
1086 def PpiList(self):\r
1087 RetVal = OrderedDict(self.Module.Ppis)\r
1088 for Library in self.DependentLibraryList:\r
1089 RetVal.update(Library.Ppis)\r
1090 ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)\r
1091 ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)\r
1092 return RetVal\r
1093\r
1094 ## Get the list of include search path\r
1095 #\r
1096 # @retval list The list path\r
1097 #\r
1098 @cached_property\r
1099 def IncludePathList(self):\r
1100 RetVal = []\r
1101 RetVal.append(self.MetaFile.Dir)\r
1102 RetVal.append(self.DebugDir)\r
1103\r
1104 for Package in self.Module.Packages:\r
1105 PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
1106 if PackageDir not in RetVal:\r
1107 RetVal.append(PackageDir)\r
1108 IncludesList = Package.Includes\r
1109 if Package._PrivateIncludes:\r
1110 if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):\r
1111 IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))\r
1112 for Inc in IncludesList:\r
1113 if Inc not in RetVal:\r
1114 RetVal.append(str(Inc))\r
1115 return RetVal\r
1116\r
1117 @cached_property\r
1118 def IncludePathLength(self):\r
1119 return sum(len(inc)+1 for inc in self.IncludePathList)\r
1120\r
82407bd1
RC
    ## Get the list of include paths from the packages
    #
    # @IncludesList list The list path
    #
    @cached_property
    def PackageIncludePathList(self):
        # NOTE(review): IncludesList is overwritten on every loop iteration, so
        # only the includes of the LAST package in self.Module.Packages are
        # returned, not the union -- confirm this is intentional before relying
        # on it for multi-package modules.
        IncludesList = []
        for Package in self.Module.Packages:
            PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
            IncludesList = Package.Includes
            if Package._PrivateIncludes:
                # Drop private includes when this module lives outside the package
                if not self.MetaFile.Path.startswith(PackageDir):
                    IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
        return IncludesList
1135\r
e8449e1d
FB
    ## Get HII EX PCDs which maybe used by VFR
    #
    #  efivarstore used by VFR may relate with HII EX PCDs
    #  Get the variable name and GUID from efivarstore and HII EX PCD
    #  List the HII EX PCDs in As Built INF if both name and GUID match.
    #
    #  @retval list HII EX PCDs
    #
    def _GetPcdsMaybeUsedByVfr(self):
        if not self.SourceFileList:
            return []

        # Collect (variable-name-byte-array, guid-string) pairs from every
        # preprocessed VFR file (<BaseName>.i) in the output directory.
        NameGuids = set()
        for SrcFile in self.SourceFileList:
            if SrcFile.Ext.lower() != '.vfr':
                continue
            Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
            if not os.path.exists(Vfri):
                continue
            VfriFile = open(Vfri, 'r')
            Content = VfriFile.read()
            VfriFile.close()
            Pos = Content.find('efivarstore')
            while Pos != -1:
                #
                # Make sure 'efivarstore' is the start of efivarstore statement
                # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
                #
                Index = Pos - 1
                while Index >= 0 and Content[Index] in ' \t\r\n':
                    Index -= 1
                if Index >= 0 and Content[Index] != ';':
                    # Not at a statement boundary; keep scanning past this match
                    Pos = Content.find('efivarstore', Pos + len('efivarstore'))
                    continue
                #
                # 'efivarstore' must be followed by name and guid
                #
                Name = gEfiVarStoreNamePattern.search(Content, Pos)
                if not Name:
                    break
                Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
                if not Guid:
                    break
                NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')
                NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
                Pos = Content.find('efivarstore', Name.end())
        if not NameGuids:
            return []
        # Match the collected name/GUID pairs against the platform's
        # DynamicExHii PCDs; a PCD is listed once even if several SKUs match.
        HiiExPcds = []
        for Pcd in self.PlatformInfo.Pcds.values():
            if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
                continue
            for SkuInfo in Pcd.SkuInfoList.values():
                Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
                if not Value:
                    continue
                Name = _ConvertStringToByteArray(SkuInfo.VariableName)
                Guid = GuidStructureStringToGuidString(Value)
                if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
                    HiiExPcds.append(Pcd)
                    break

        return HiiExPcds
1199\r
    def _GenOffsetBin(self):
        ## Generate <Name>Offset.bin recording VFR/UNI binary offsets in the image.
        #
        # Each record is a 16-byte GUID (identifying UNI vs VFR data) followed by
        # a 64-bit offset into the PE32+/TE file, derived from the .map file.
        #
        #   @retval str   the output file name (relative to OutputDir), or
        #   @retval None  when the module has no VFR/UNI content or no offsets
        VfrUniBaseName = {}
        for SourceFile in self.Module.Sources:
            if SourceFile.Type.upper() == ".VFR" :
                #
                # search the .map file to find the offset of vfr binary in the PE32+/TE file.
                #
                VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
            elif SourceFile.Type.upper() == ".UNI" :
                #
                # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
                #
                VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")

        if not VfrUniBaseName:
            return None
        MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
        EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
        VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
        if not VfrUniOffsetList:
            return None

        OutputName = '%sOffset.bin' % self.Name
        UniVfrOffsetFileName    =  os.path.join( self.OutputDir, OutputName)

        try:
            fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
        except:
            EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)

        # Use a instance of BytesIO to cache data
        fStringIO = BytesIO()

        for Item in VfrUniOffsetList:
            if (Item[0].find("Strings") != -1):
                #
                # UNI offset in image.
                # GUID + Offset
                # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
                #
                UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
                fStringIO.write(UniGuid)
                UniValue = pack ('Q', int (Item[1], 16))
                fStringIO.write (UniValue)
            else:
                #
                # VFR binary offset in image.
                # GUID + Offset
                # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
                #
                VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
                fStringIO.write(VfrGuid)
                VfrValue = pack ('Q', int (Item[1], 16))
                fStringIO.write (VfrValue)
        #
        # write data into file.
        #
        try :
            fInputfile.write (fStringIO.getvalue())
        except:
            EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
                            "file been locked or using by other applications." %UniVfrOffsetFileName, None)

        fStringIO.close ()
        fInputfile.close ()
        return OutputName
d01a9986 1266\r
e8449e1d
FB
    @cached_property
    def OutputFile(self):
        ## Set of all output file paths produced by this module.
        #
        # Includes final build targets, the generated offset binary (if any),
        # and everything found under OutputDir and FfsOutputDir.
        retVal = set()

        OutputDir = self.OutputDir.replace('\\', '/').strip('/')
        DebugDir = self.DebugDir.replace('\\', '/').strip('/')
        for Item in self.CodaTargetList:
            # Strip the debug/output directory prefixes to get a relative name
            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
            NewFile = path.join(self.OutputDir, File)
            retVal.add(NewFile)

        Bin = self._GenOffsetBin()
        if Bin:
            NewFile = path.join(self.OutputDir, Bin)
            retVal.add(NewFile)

        # NOTE(review): os.walk descends into subdirectories but the join below
        # uses self.OutputDir, not Root -- files in subdirectories are recorded
        # with a flattened path; confirm this is the intended behavior.
        for Root, Dirs, Files in os.walk(self.OutputDir):
            for File in Files:
                # lib file is already added through above CodaTargetList, skip it here
                if not (File.lower().endswith('.obj') or File.lower().endswith('.lib')):
                    NewFile = path.join(self.OutputDir, File)
                    retVal.add(NewFile)

        for Root, Dirs, Files in os.walk(self.FfsOutputDir):
            for File in Files:
                NewFile = path.join(self.FfsOutputDir, File)
                retVal.add(NewFile)

        return retVal
1296\r
    ## Create AsBuilt INF file the module
    #
    def CreateAsBuiltInf(self):
        # Generates <Name>.inf in OutputDir describing the built binaries,
        # packages, PCDs, protocols, PPIs, GUIDs and flags. Only runs once per
        # module, and only for non-library modules that have source files and
        # no pre-existing binary files.

        if self.IsAsBuiltInfCreated:
            return

        # Skip INF file generation for libraries
        if self.IsLibrary:
            return

        # Skip the following code for modules with no source files
        if not self.SourceFileList:
            return

        # Skip the following code for modules without any binary files
        if self.BinaryFileList:
            return

        ### TODO: How to handles mixed source and binary modules

        # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
        # Also find all packages that the DynamicEx PCDs depend on
        Pcds = []
        PatchablePcds = []
        Packages = []
        PcdCheckList = []
        PcdTokenSpaceList = []
        for Pcd in self.ModulePcdList + self.LibraryPcdList:
            if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
                PatchablePcds.append(Pcd)
                PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
            elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
                if Pcd not in Pcds:
                    Pcds.append(Pcd)
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
                    PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
        GuidList = OrderedDict(self.GuidList)
        for TokenSpace in self.GetGuidsUsedByPcd:
            # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
            # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
            if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
                GuidList.pop(TokenSpace)
        # Keep only the packages that actually declare something this module uses
        CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
        for Package in self.DerivedPackageList:
            if Package in Packages:
                continue
            BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
            Found = False
            for Index in range(len(BeChecked)):
                for Item in CheckList[Index]:
                    if Item in BeChecked[Index]:
                        Packages.append(Package)
                        Found = True
                        break
                if Found:
                    break

        # Packages needed by HII EX PCDs referenced from VFR efivarstores
        VfrPcds = self._GetPcdsMaybeUsedByVfr()
        for Pkg in self.PlatformInfo.PackageList:
            if Pkg in Packages:
                continue
            for VfrPcd in VfrPcds:
                if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
                    (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
                    Packages.append(Pkg)
                    break

        # A UEFI_DRIVER with a generated depex is reported as DXE_DRIVER
        ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
        DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
        Guid = self.Guid
        MDefs = self.Module.Defines

        # Template substitution dictionary for gAsBuiltInfHeaderString
        AsBuiltInfDict = {
            'module_name'                       : self.Name,
            'module_guid'                       : Guid,
            'module_module_type'                : ModuleType,
            'module_version_string'             : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
            'pcd_is_driver_string'              : [],
            'module_uefi_specification_version' : [],
            'module_pi_specification_version'   : [],
            'module_entry_point'                : self.Module.ModuleEntryPointList,
            'module_unload_image'               : self.Module.ModuleUnloadImageList,
            'module_constructor'                : self.Module.ConstructorList,
            'module_destructor'                 : self.Module.DestructorList,
            'module_shadow'                     : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
            'module_pci_vendor_id'              : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
            'module_pci_device_id'              : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
            'module_pci_class_code'             : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
            'module_pci_revision'               : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
            'module_build_number'               : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
            'module_spec'                       : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
            'module_uefi_hii_resource_section'  : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
            'module_uni_file'                   : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
            'module_arch'                       : self.Arch,
            'package_item'                      : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
            'binary_item'                       : [],
            'patchablepcd_item'                 : [],
            'pcd_item'                          : [],
            'protocol_item'                     : [],
            'ppi_item'                          : [],
            'guid_item'                         : [],
            'flags_item'                        : [],
            'libraryclasses_item'               : []
        }

        if 'MODULE_UNI_FILE' in MDefs:
            UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
            if os.path.isfile(UNIFile):
                shutil.copy2(UNIFile, self.OutputDir)

        if self.AutoGenVersion > int(gInfSpecVersion, 0):
            AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
        else:
            AsBuiltInfDict['module_inf_version'] = gInfSpecVersion

        if DriverType:
            AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)

        if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
        if 'PI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])

        # Record final build targets in the [Binaries] section, classified by extension
        OutputDir = self.OutputDir.replace('\\', '/').strip('/')
        DebugDir = self.DebugDir.replace('\\', '/').strip('/')
        for Item in self.CodaTargetList:
            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
            if os.path.isabs(File):
                File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
            if Item.Target.Ext.lower() == '.aml':
                AsBuiltInfDict['binary_item'].append('ASL|' + File)
            elif Item.Target.Ext.lower() == '.acpi':
                AsBuiltInfDict['binary_item'].append('ACPI|' + File)
            elif Item.Target.Ext.lower() == '.efi':
                AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
            else:
                AsBuiltInfDict['binary_item'].append('BIN|' + File)
        if not self.DepexGenerated:
            DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
            if os.path.exists(DepexFile):
                self.DepexGenerated = True
        if self.DepexGenerated:
            if self.ModuleType in [SUP_MODULE_PEIM]:
                AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
                AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
                AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')

        Bin = self._GenOffsetBin()
        if Bin:
            AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)

        for Root, Dirs, Files in os.walk(OutputDir):
            for File in Files:
                if File.lower().endswith('.pdb'):
                    AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
        # Header comments start at the @BinaryHeader marker if one is present
        HeaderComments = self.Module.HeaderComments
        StartPos = 0
        for Index in range(len(HeaderComments)):
            if HeaderComments[Index].find('@BinaryHeader') != -1:
                HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
                StartPos = Index
                break
        AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
        AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)

        # Protocol/PPI/GUID sections, each entry prefixed with its usage comments
        GenList = [
            (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
            (self.PpiList, self._PpiComments, 'ppi_item'),
            (GuidList, self._GuidComments, 'guid_item')
        ]
        for Item in GenList:
            for CName in Item[0]:
                Comments = '\n  '.join(Item[1][CName]) if CName in Item[1] else ''
                Entry = Comments + '\n  ' + CName if Comments else CName
                AsBuiltInfDict[Item[2]].append(Entry)
        PatchList = parsePcdInfoFromMapFile(
                            os.path.join(self.OutputDir, self.Name + '.map'),
                            os.path.join(self.OutputDir, self.Name + '.efi')
                        )
        if PatchList:
            for Pcd in PatchablePcds:
                TokenCName = Pcd.TokenCName
                # MixedPcd entries may rename the token; use the mapped name
                for PcdItem in GlobalData.MixedPcd:
                    if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                        TokenCName = PcdItem[0]
                        break
                for PatchPcd in PatchList:
                    if TokenCName == PatchPcd[0]:
                        break
                else:
                    # PCD not present in the map file: nothing to patch
                    continue
                PcdValue = ''
                if Pcd.DatumType == 'BOOLEAN':
                    BoolValue = Pcd.DefaultValue.upper()
                    if BoolValue == 'TRUE':
                        Pcd.DefaultValue = '1'
                    elif BoolValue == 'FALSE':
                        Pcd.DefaultValue = '0'

                if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
                    HexFormat = '0x%02x'
                    if Pcd.DatumType == TAB_UINT16:
                        HexFormat = '0x%04x'
                    elif Pcd.DatumType == TAB_UINT32:
                        HexFormat = '0x%08x'
                    elif Pcd.DatumType == TAB_UINT64:
                        HexFormat = '0x%016x'
                    PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
                else:
                    # VOID* PCD: render the default value as a padded byte array
                    if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
                        EdkLogger.error("build", AUTOGEN_ERROR,
                                        "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
                                        )
                    ArraySize = int(Pcd.MaxDatumSize, 0)
                    PcdValue = Pcd.DefaultValue
                    if PcdValue[0] != '{':
                        Unicode = False
                        if PcdValue[0] == 'L':
                            Unicode = True
                        PcdValue = PcdValue.lstrip('L')
                        PcdValue = eval(PcdValue)
                        NewValue = '{'
                        for Index in range(0, len(PcdValue)):
                            if Unicode:
                                CharVal = ord(PcdValue[Index])
                                NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
                                        + '0x%02x' % (CharVal >> 8) + ', '
                            else:
                                NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
                        Padding = '0x00, '
                        if Unicode:
                            Padding = Padding * 2
                            ArraySize = ArraySize // 2
                        if ArraySize < (len(PcdValue) + 1):
                            if Pcd.MaxSizeUserSet:
                                EdkLogger.error("build", AUTOGEN_ERROR,
                                                "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                                )
                            else:
                                ArraySize = len(PcdValue) + 1
                        if ArraySize > len(PcdValue) + 1:
                            NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
                        PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
                    elif len(PcdValue.split(',')) <= ArraySize:
                        PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
                        PcdValue += '}'
                    else:
                        if Pcd.MaxSizeUserSet:
                            EdkLogger.error("build", AUTOGEN_ERROR,
                                            "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                            )
                        else:
                            ArraySize = len(PcdValue) + 1
                PcdItem = '%s.%s|%s|0x%X' % \
                    (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
                PcdComments = ''
                if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                    PcdComments = '\n  '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
                if PcdComments:
                    PcdItem = PcdComments + '\n  ' + PcdItem
                AsBuiltInfDict['patchablepcd_item'].append(PcdItem)

        # DynamicEx PCDs (plus those referenced from VFR), annotated with HII info
        for Pcd in Pcds + VfrPcds:
            PcdCommentList = []
            HiiInfo = ''
            TokenCName = Pcd.TokenCName
            for PcdItem in GlobalData.MixedPcd:
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                    TokenCName = PcdItem[0]
                    break
            if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
                for SkuName in Pcd.SkuInfoList:
                    SkuInfo = Pcd.SkuInfoList[SkuName]
                    HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
                    break
            if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
            if HiiInfo:
                UsageIndex = -1
                UsageStr = ''
                for Index, Comment in enumerate(PcdCommentList):
                    for Usage in UsageList:
                        if Comment.find(Usage) != -1:
                            UsageStr = Usage
                            UsageIndex = Index
                            break
                if UsageIndex != -1:
                    PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
                else:
                    PcdCommentList.append('## UNDEFINED ' + HiiInfo)
            PcdComments = '\n  '.join(PcdCommentList)
            PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
            if PcdComments:
                PcdEntry = PcdComments + '\n  ' + PcdEntry
            AsBuiltInfDict['pcd_item'].append(PcdEntry)
        for Item in self.BuildOption:
            if 'FLAGS' in self.BuildOption[Item]:
                AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))

        # Generated LibraryClasses section in comments.
        for Library in self.LibraryAutoGenList:
            AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))

        # Generated UserExtensions TianoCore section.
        # All tianocore user extensions are copied.
        UserExtStr = ''
        for TianoCore in self._GetTianoCoreUserExtensionList():
            UserExtStr += '\n'.join(TianoCore)
            ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
            if os.path.isfile(ExtensionFile):
                shutil.copy2(ExtensionFile, self.OutputDir)
        AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr

        # Generated depex expression section in comments.
        DepexExpression = self._GetDepexExpresionString()
        AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''

        AsBuiltInf = TemplateString()
        AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))

        SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)

        self.IsAsBuiltInfCreated = True
1624\r
0e7e7a26
SS
1625 def CacheCopyFile(self, OriginDir, CopyDir, File):\r
1626 sub_dir = os.path.relpath(File, CopyDir)\r
1627 destination_file = os.path.join(OriginDir, sub_dir)\r
1628 destination_dir = os.path.dirname(destination_file)\r
1629 CreateDirectory(destination_dir)\r
1630 try:\r
1631 CopyFileOnChange(File, destination_dir)\r
1632 except:\r
1633 EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))\r
1634 return\r
1635\r
    def CopyModuleToCache(self):
        ## Publish this module's build outputs into the binary cache.
        #
        # Requires both the pre-makefile hash and the make hash chain to be
        # present in GlobalData.gCacheIR; the make hash selects the cache
        # sub-directory so different inputs never collide.
        #
        #   @retval False on any hash-generation failure (no explicit True return)
        self.GenPreMakefileHash(GlobalData.gCacheIR)
        if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
            return False

        self.GenMakeHash(GlobalData.gCacheIR)
        if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashChain or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
            return False

        MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)
        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName, MakeHashStr)
        FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name, MakeHashStr)

        CreateDirectory (FileDir)
        self.SaveHashChainFileToCache(GlobalData.gCacheIR)
        ModuleFile = path.join(self.OutputDir, self.Name + '.inf')
        if os.path.exists(ModuleFile):
            CopyFileOnChange(ModuleFile, FileDir)
        if not self.OutputFile:
            # Fall back to the binaries recorded in the build database
            Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
            self.OutputFile = Ma.Binaries
        for File in self.OutputFile:
            if os.path.exists(File):
                # FFS outputs go to FfsDir; everything else mirrors OutputDir
                if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):
                    self.CacheCopyFile(FfsDir, self.FfsOutputDir, File)
                else:
                    self.CacheCopyFile(FileDir, self.OutputDir, File)
0e7e7a26
SS
1669 def SaveHashChainFileToCache(self, gDict):\r
1670 if not GlobalData.gBinCacheDest:\r
e8449e1d 1671 return False\r
0e7e7a26
SS
1672\r
1673 self.GenPreMakefileHash(gDict)\r
1674 if not (self.MetaFile.Path, self.Arch) in gDict or \\r
1675 not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
1676 EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
1677 return False\r
1678\r
1679 self.GenMakeHash(gDict)\r
1680 if not (self.MetaFile.Path, self.Arch) in gDict or \\r
1681 not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain or \\r
1682 not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
1683 EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
1684 return False\r
1685\r
1686 # save the hash chain list as cache file\r
1687 MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)\r
1688 CacheDestDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
1689 CacheHashDestDir = path.join(CacheDestDir, MakeHashStr)\r
1690 ModuleHashPair = path.join(CacheDestDir, self.Name + ".ModuleHashPair")\r
1691 MakeHashChain = path.join(CacheHashDestDir, self.Name + ".MakeHashChain")\r
1692 ModuleFilesChain = path.join(CacheHashDestDir, self.Name + ".ModuleFilesChain")\r
1693\r
1694 # save the HashChainDict as json file\r
1695 CreateDirectory (CacheDestDir)\r
1696 CreateDirectory (CacheHashDestDir)\r
1697 try:\r
1698 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
1699 if os.path.exists(ModuleHashPair):\r
94459080
SS
1700 with open(ModuleHashPair, 'r') as f:\r
1701 ModuleHashPairList = json.load(f)\r
0e7e7a26
SS
1702 PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
1703 MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
1704 ModuleHashPairList.append((PreMakeHash, MakeHash))\r
1705 ModuleHashPairList = list(set(map(tuple, ModuleHashPairList)))\r
1706 with open(ModuleHashPair, 'w') as f:\r
1707 json.dump(ModuleHashPairList, f, indent=2)\r
1708 except:\r
1709 EdkLogger.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair)\r
1710 return False\r
1711\r
1712 try:\r
1713 with open(MakeHashChain, 'w') as f:\r
1714 json.dump(gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain, f, indent=2)\r
1715 except:\r
1716 EdkLogger.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain)\r
1717 return False\r
1718\r
1719 try:\r
1720 with open(ModuleFilesChain, 'w') as f:\r
1721 json.dump(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain, f, indent=2)\r
1722 except:\r
1723 EdkLogger.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain)\r
1724 return False\r
1725\r
1726 # save the autogenfile and makefile for debug usage\r
1727 CacheDebugDir = path.join(CacheHashDestDir, "CacheDebug")\r
1728 CreateDirectory (CacheDebugDir)\r
1729 CopyFileOnChange(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath, CacheDebugDir)\r
1730 if gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
1731 for File in gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
1732 CopyFileOnChange(str(File), CacheDebugDir)\r
1733\r
1734 return True\r
e8449e1d
FB
1735\r
    ## Create makefile for the module and its dependent libraries
    #
    # @param      CreateLibraryMakeFile   Flag indicating if or not the makefiles of
    #                                     dependent libraries will be created
    # @param      GenFfsList              GenFfs command list recorded for this module
    #
    @cached_class_function
    def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):
        # Early exit when the shared cache IR already marks makefile
        # generation done for this (module, arch).
        gDict = GlobalData.gCacheIR
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
            return

        # nest this function inside it's only caller.
        def CreateTimeStamp():
            # Record every file whose change should invalidate this module:
            # the INF itself, sources, dependent library INFs and AutoGen deps.
            FileSet = {self.MetaFile.Path}

            for SourceFile in self.Module.Sources:
                FileSet.add (SourceFile.Path)

            for Lib in self.DependentLibraryList:
                FileSet.add (Lib.MetaFile.Path)

            for f in self.AutoGenDepSet:
                FileSet.add (f.Path)

            # Remove any stale timestamp file before rewriting it.
            if os.path.exists (self.TimeStampPath):
                os.remove (self.TimeStampPath)

            SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)

        # Ignore generating makefile when it is a binary module
        if self.IsBinaryModule:
            return

        self.GenFfsList = GenFfsList

        # Generate the dependent libraries' makefiles first.
        if not self.IsLibrary and CreateLibraryMakeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateMakeFile()

        # CanSkip uses timestamps to determine build skipping
        if self.CanSkip():
            return

        # A module with a [BuildOptions]-style custom makefile uses
        # CustomMakefile; otherwise the standard module makefile generator.
        if len(self.CustomMakefile) == 0:
            Makefile = GenMake.ModuleMakefile(self)
        else:
            Makefile = GenMake.CustomMakefile(self)
        if Makefile.Generate():
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
                            (self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
                            (self.Name, self.Arch))

        CreateTimeStamp()

        # Resolve the concrete makefile path for the active make tool type.
        MakefileType = Makefile._FileType
        MakefileName = Makefile._FILE_NAME_[MakefileType]
        MakefilePath = os.path.join(self.MakeFileDir, MakefileName)

        # Update the shared cache IR under the global lock; if no entry
        # exists yet, install a fresh one (MewIR).
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.MakefilePath = MakefilePath
        MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet
        MewIR.CreateMakeFileDone = True
        with GlobalData.cache_lock:
            try:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.MakefilePath = MakefilePath
                IR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet
                IR.CreateMakeFileDone = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                # No existing IR entry (KeyError expected) -- store the new one.
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR
1810\r
e8449e1d
FB
1811 def CopyBinaryFiles(self):\r
1812 for File in self.Module.Binaries:\r
1813 SrcPath = File.Path\r
1814 DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))\r
1815 CopyLongFilePath(SrcPath, DstPath)\r
    ## Create autogen code for the module and its dependent libraries
    #
    # @param      CreateLibraryCodeFile   Flag indicating if or not the code of
    #                                     dependent libraries will be created
    #
    # @retval     list    Names of the AutoGen files generated (also cached in
    #                     the shared cache IR), or None on an early exit
    #
    def CreateCodeFile(self, CreateLibraryCodeFile=True):
        # Early exit when the shared cache IR already marks code generation
        # done for this (module, arch).
        gDict = GlobalData.gCacheIR
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:
            return

        if self.IsCodeFileCreated:
            return

        # Need to generate PcdDatabase even PcdDriver is binarymodule
        if self.IsBinaryModule and self.PcdIsDriver != '':
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
            return
        if self.IsBinaryModule:
            # Binary library: just copy its binaries to the output directory.
            if self.IsLibrary:
                self.CopyBinaryFiles()
            return

        # Generate the dependent libraries' code first.
        if not self.IsLibrary and CreateLibraryCodeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateCodeFile()

        # CanSkip uses timestamps to determine build skipping
        if self.CanSkip():
            return

        AutoGenList = []
        IgoredAutoGenList = []

        # Emit each AutoGen file; GenC.Generate returns whether the file was
        # actually (re)written.
        for File in self.AutoGenFileList:
            if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
                AutoGenList.append(str(File))
            else:
                IgoredAutoGenList.append(str(File))


        for ModuleType in self.DepexList:
            # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
            if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
                continue

            Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
            DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}

            if len(Dpx.PostfixNotation) != 0:
                self.DepexGenerated = True

            if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
                AutoGenList.append(str(DpxFile))
            else:
                IgoredAutoGenList.append(str(DpxFile))

        if IgoredAutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
                            (" ".join(AutoGenList), self.Name, self.Arch))
        elif AutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
                            (" ".join(IgoredAutoGenList), self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
                            (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))

        self.IsCodeFileCreated = True
        # Record completion in the shared cache IR under the global lock;
        # install a fresh entry (MewIR) when none exists yet.
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.CreateCodeFileDone = True
        with GlobalData.cache_lock:
            try:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.CreateCodeFileDone = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                # No existing IR entry (KeyError expected) -- store the new one.
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR

        return AutoGenList
1895\r
1896 ## Summarize the ModuleAutoGen objects of all libraries used by this module\r
1897 @cached_property\r
1898 def LibraryAutoGenList(self):\r
1899 RetVal = []\r
1900 for Library in self.DependentLibraryList:\r
1901 La = ModuleAutoGen(\r
1902 self.Workspace,\r
1903 Library.MetaFile,\r
1904 self.BuildTarget,\r
1905 self.ToolChain,\r
1906 self.Arch,\r
1907 self.PlatformInfo.MetaFile,\r
1908 self.DataPipe\r
1909 )\r
1910 La.IsLibrary = True\r
1911 if La not in RetVal:\r
1912 RetVal.append(La)\r
1913 for Lib in La.CodaTargetList:\r
1914 self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)\r
1915 return RetVal\r
1916\r
1917 def GenModuleHash(self):\r
1918 # Initialize a dictionary for each arch type\r
1919 if self.Arch not in GlobalData.gModuleHash:\r
1920 GlobalData.gModuleHash[self.Arch] = {}\r
1921\r
1922 # Early exit if module or library has been hashed and is in memory\r
1923 if self.Name in GlobalData.gModuleHash[self.Arch]:\r
1924 return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
1925\r
1926 # Initialze hash object\r
1927 m = hashlib.md5()\r
1928\r
1929 # Add Platform level hash\r
1930 m.update(GlobalData.gPlatformHash.encode('utf-8'))\r
1931\r
1932 # Add Package level hash\r
1933 if self.DependentPackageList:\r
1934 for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
1935 if Pkg.PackageName in GlobalData.gPackageHash:\r
1936 m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))\r
1937\r
1938 # Add Library hash\r
1939 if self.LibraryAutoGenList:\r
1940 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
1941 if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r
1942 Lib.GenModuleHash()\r
1943 m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
1944\r
1945 # Add Module self\r
94459080
SS
1946 with open(str(self.MetaFile), 'rb') as f:\r
1947 Content = f.read()\r
e8449e1d
FB
1948 m.update(Content)\r
1949\r
1950 # Add Module's source files\r
1951 if self.SourceFileList:\r
1952 for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r
1953 f = open(str(File), 'rb')\r
1954 Content = f.read()\r
1955 f.close()\r
1956 m.update(Content)\r
1957\r
1958 GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()\r
1959\r
1960 return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
1961\r
0e7e7a26
SS
    def GenModuleFilesHash(self, gDict):
        # Compute the hash of this module's own files (INF, sources and the
        # headers they pull in) and store digest/hexdigest/file-chain into the
        # shared cache IR.  Returns the updated IR entry, or None on skip.

        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict:
            if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:
                return gDict[(self.MetaFile.Path, self.Arch)]

        # skip if the module cache already crashed
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return

        DependencyFileSet = set()
        # Add Module Meta file
        DependencyFileSet.add(self.MetaFile)

        # Add Module's source files
        if self.SourceFileList:
            for File in set(self.SourceFileList):
                DependencyFileSet.add(File)

        # Add modules's include header files
        # Search dependency file list for each source file
        SourceFileList = []
        OutPutFileList = []
        for Target in self.IntroTargetList:
            SourceFileList.extend(Target.Inputs)
            OutPutFileList.extend(Target.Outputs)
        # Drop generated files (outputs of other targets) from the scan list.
        if OutPutFileList:
            for Item in OutPutFileList:
                if Item in SourceFileList:
                    SourceFileList.remove(Item)
        SearchList = []
        for file_path in self.IncludePathList + self.BuildOptionIncPathList:
            # skip the folders in platform BuildDir which are not been generated yet
            if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):
                continue
            SearchList.append(file_path)
        FileDependencyDict = {}
        ForceIncludedFile = []
        for F in SourceFileList:
            # skip the files which are not been generated yet, because
            # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c
            if not os.path.exists(F.Path):
                continue
            FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)

        if FileDependencyDict:
            for Dependency in FileDependencyDict.values():
                DependencyFileSet.update(set(Dependency))

        # Caculate all above dependency files hash
        # Initialze hash object
        FileList = []
        m = hashlib.md5()
        for File in sorted(DependencyFileSet, key=lambda x: str(x)):
            if not os.path.exists(str(File)):
                EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
                continue
            with open(str(File), 'rb') as f:
                Content = f.read()
            m.update(Content)
            # Keep a per-file hash alongside the cumulative digest.
            FileList.append((str(File), hashlib.md5(Content).hexdigest()))


        # Update the shared cache IR under the global lock; install a fresh
        # entry (MewIR) when none exists yet.
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.ModuleFilesHashDigest = m.digest()
        MewIR.ModuleFilesHashHexDigest = m.hexdigest()
        MewIR.ModuleFilesChain = FileList
        with GlobalData.cache_lock:
            try:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.ModuleFilesHashDigest = m.digest()
                IR.ModuleFilesHashHexDigest = m.hexdigest()
                IR.ModuleFilesChain = FileList
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                # No existing IR entry (KeyError expected) -- store the new one.
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR

        return gDict[(self.MetaFile.Path, self.Arch)]
2041\r
    def GenPreMakefileHash(self, gDict):
        # Compute the "pre-makefile" hash: platform hash + package hashes +
        # library module-file hashes + this module's own file hash.  Stored in
        # the shared cache IR; returns the IR entry, or None on skip.

        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            return gDict[(self.MetaFile.Path, self.Arch)]

        # skip if the module cache already crashed
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return

        # skip binary module
        if self.IsBinaryModule:
            return

        # Make sure this module's own file hash exists first.
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            self.GenModuleFilesHash(gDict)

        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return

        # Initialze hash object
        m = hashlib.md5()

        # Add Platform level hash
        if ('PlatformHash') in gDict:
            m.update(gDict[('PlatformHash')].encode('utf-8'))
        else:
            EdkLogger.quiet("[cache warning]: PlatformHash is missing")

        # Add Package level hash
        if self.DependentPackageList:
            for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
                if (Pkg.PackageName, 'PackageHash') in gDict:
                    m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))
                else:
                    EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg.PackageName, self.MetaFile.Name, self.Arch))

        # Add Library hash (computed recursively on demand)
        if self.LibraryAutoGenList:
            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
                   not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:
                    Lib.GenPreMakefileHash(gDict)
                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)

        # Add Module self
        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)

        # Publish the result under the global cache lock.
        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.PreMakefileHashHexDigest = m.hexdigest()
            gDict[(self.MetaFile.Path, self.Arch)] = IR

        return gDict[(self.MetaFile.Path, self.Arch)]
2100\r
2101 def GenMakeHeaderFilesHash(self, gDict):\r
2102 # Early exit if module or library has been hashed and is in memory\r
2103 if (self.MetaFile.Path, self.Arch) in gDict and \\r
2104 gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
2105 return gDict[(self.MetaFile.Path, self.Arch)]\r
2106\r
94459080
SS
2107 # skip if the module cache already crashed\r
2108 if (self.MetaFile.Path, self.Arch) in gDict and \\r
2109 gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
2110 return\r
2111\r
0e7e7a26
SS
2112 # skip binary module\r
2113 if self.IsBinaryModule:\r
2114 return\r
2115\r
2116 if not (self.MetaFile.Path, self.Arch) in gDict or \\r
2117 not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:\r
2118 if self.IsLibrary:\r
2119 if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.libConstPcd:\r
2120 self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
2121 if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.Refes:\r
2122 self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
2123 self.CreateCodeFile()\r
2124 if not (self.MetaFile.Path, self.Arch) in gDict or \\r
2125 not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
e3c8311f 2126 self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.Path, self.Arch),[]))\r
0e7e7a26
SS
2127\r
2128 if not (self.MetaFile.Path, self.Arch) in gDict or \\r
2129 not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \\r
2130 not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
2131 EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
2132 return\r
2133\r
2134 DependencyFileSet = set()\r
2135 # Add Makefile\r
2136 if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:\r
2137 DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)\r
2138 else:\r
2139 EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
2140\r
2141 # Add header files\r
2142 if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
2143 for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
2144 DependencyFileSet.add(File)\r
2145 else:\r
2146 EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
2147\r
2148 # Add AutoGen files\r
2149 if self.AutoGenFileList:\r
2150 for File in set(self.AutoGenFileList):\r
2151 DependencyFileSet.add(File)\r
2152\r
2153 # Caculate all above dependency files hash\r
2154 # Initialze hash object\r
2155 FileList = []\r
2156 m = hashlib.md5()\r
2157 for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
2158 if not os.path.exists(str(File)):\r
2159 EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
2160 continue\r
2161 f = open(str(File), 'rb')\r
2162 Content = f.read()\r
2163 f.close()\r
2164 m.update(Content)\r
2165 FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
2166\r
94459080 2167 with GlobalData.cache_lock:\r
0e7e7a26
SS
2168 IR = gDict[(self.MetaFile.Path, self.Arch)]\r
2169 IR.AutoGenFileList = self.AutoGenFileList.keys()\r
2170 IR.MakeHeaderFilesHashChain = FileList\r
2171 IR.MakeHeaderFilesHashDigest = m.digest()\r
2172 gDict[(self.MetaFile.Path, self.Arch)] = IR\r
2173\r
2174 return gDict[(self.MetaFile.Path, self.Arch)]\r
2175\r
    def GenMakeHash(self, gDict):
        # Compute the make-level hash: makefile/header hashes plus library
        # make hashes plus this module's file hash, together with a sorted
        # MakeHashChain of (file, hash) entries.  Stored in the shared cache
        # IR; returns the IR entry, or None on skip/failure.

        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
            return gDict[(self.MetaFile.Path, self.Arch)]

        # skip if the module cache already crashed
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return

        # skip binary module
        if self.IsBinaryModule:
            return

        # Make sure the prerequisite hashes exist.
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            self.GenModuleFilesHash(gDict)
        if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
            self.GenMakeHeaderFilesHash(gDict)

        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:
            EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return

        # Initialze hash object
        m = hashlib.md5()
        MakeHashChain = []

        # Add hash of makefile and dependency header files; extend the chain
        # with entries not already present, sorted for determinism.
        m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)
        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))
        New.sort(key=lambda x: str(x))
        MakeHashChain += New

        # Add Library hash (computed recursively on demand)
        if self.LibraryAutoGenList:
            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
                   not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:
                    Lib.GenMakeHash(gDict)
                if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:
                    print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)
                    continue
                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)
                New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))
                New.sort(key=lambda x: str(x))
                MakeHashChain += New

        # Add Module self
        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)
        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))
        New.sort(key=lambda x: str(x))
        MakeHashChain += New

        # Publish the result under the global cache lock.
        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.MakeHashDigest = m.digest()
            IR.MakeHashHexDigest = m.hexdigest()
            IR.MakeHashChain = MakeHashChain
            gDict[(self.MetaFile.Path, self.Arch)] = IR

        return gDict[(self.MetaFile.Path, self.Arch)]
2243\r
    ## Decide whether we can skip the left autogen and make process
    #
    #  Checks the binary cache source for a matching PreMakefileHash entry;
    #  on a hit, restores the cached outputs and marks PreMakeCacheHit.
    #  Returns True only on a confirmed cache hit.
    #
    def CanSkipbyPreMakefileCache(self, gDict):
        if not GlobalData.gBinCacheSource:
            return False

        # Reuse a previously-determined result for this (module, arch).
        if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:
            return True

        if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return False

        # If Module is binary, do not skip by cache
        if self.IsBinaryModule:
            return False

        # .inc is contains binary information so do not skip by hash as well
        for f_ext in self.SourceFileList:
            if '.inc' in str(f_ext):
                return False

        # Get the module hash values from stored cache and currrent build
        # then check whether cache hit based on the hash values
        # if cache hit, restore all the files from cache
        FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
        FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)

        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
        ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
        if not os.path.exists(ModuleHashPair):
            # Missing metadata means the cache entry is unusable; mark it
            # crashed so later checks fail fast.
            EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
            with GlobalData.cache_lock:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.CacheCrash = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            return False

        try:
            with open(ModuleHashPair, 'r') as f:
                ModuleHashPairList = json.load(f)
        except:
            EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
            return False

        self.GenPreMakefileHash(gDict)
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return False

        # Find the MakeHash paired with our current PreMakefileHash; that
        # names the cache sub-folder holding the outputs.
        MakeHashStr = None
        CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
        for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
            if PreMakefileHash == CurrentPreMakeHash:
                MakeHashStr = str(MakeHash)

        if not MakeHashStr:
            return False

        TargetHashDir = path.join(FileDir, MakeHashStr)
        TargetFfsHashDir = path.join(FfsDir, MakeHashStr)

        if not os.path.exists(TargetHashDir):
            EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
            return False

        # Restore all cached module outputs and FFS outputs.
        for root, dir, files in os.walk(TargetHashDir):
            for f in files:
                File = path.join(root, f)
                self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
        if os.path.exists(TargetFfsHashDir):
            for root, dir, files in os.walk(TargetFfsHashDir):
                for f in files:
                    File = path.join(root, f)
                    self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)

        # PCD database code must still be regenerated for the PCD drivers.
        if self.Name == "PcdPeim" or self.Name == "PcdDxe":
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())

        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.PreMakeCacheHit = True
            gDict[(self.MetaFile.Path, self.Arch)] = IR
        print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)
        #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))
        return True
2329\r
    ## Decide whether we can skip the make process
    #
    #  Like CanSkipbyPreMakefileCache but matches on the MakeHash instead of
    #  the PreMakefileHash; on a hit, restores cached outputs and marks
    #  MakeCacheHit.  Returns True only on a confirmed cache hit.
    #
    def CanSkipbyMakeCache(self, gDict):
        if not GlobalData.gBinCacheSource:
            return False

        # Reuse a previously-determined result for this (module, arch).
        if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
            return True

        if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return False

        # If Module is binary, do not skip by cache
        if self.IsBinaryModule:
            print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)
            return False

        # .inc is contains binary information so do not skip by hash as well
        for f_ext in self.SourceFileList:
            if '.inc' in str(f_ext):
                with GlobalData.cache_lock:
                    IR = gDict[(self.MetaFile.Path, self.Arch)]
                    IR.MakeCacheHit = False
                    gDict[(self.MetaFile.Path, self.Arch)] = IR
                print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)
                return False

        # Get the module hash values from stored cache and currrent build
        # then check whether cache hit based on the hash values
        # if cache hit, restore all the files from cache
        FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
        FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)

        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
        ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
        if not os.path.exists(ModuleHashPair):
            # Missing metadata means the cache entry is unusable; mark it
            # crashed so later checks fail fast.
            EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
            with GlobalData.cache_lock:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.CacheCrash = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            return False

        try:
            with open(ModuleHashPair, 'r') as f:
                ModuleHashPairList = json.load(f)
        except:
            EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
            return False

        self.GenMakeHash(gDict)
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
            EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return False

        # Find our current MakeHash among the recorded pairs; that names the
        # cache sub-folder holding the outputs.
        MakeHashStr = None
        CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
        for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
            if MakeHash == CurrentMakeHash:
                MakeHashStr = str(MakeHash)

        if not MakeHashStr:
            print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
            return False

        TargetHashDir = path.join(FileDir, MakeHashStr)
        TargetFfsHashDir = path.join(FfsDir, MakeHashStr)
        if not os.path.exists(TargetHashDir):
            EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
            return False

        # Restore all cached module outputs and FFS outputs.
        for root, dir, files in os.walk(TargetHashDir):
            for f in files:
                File = path.join(root, f)
                self.CacheCopyFile(self.OutputDir, TargetHashDir, File)

        if os.path.exists(TargetFfsHashDir):
            for root, dir, files in os.walk(TargetFfsHashDir):
                for f in files:
                    File = path.join(root, f)
                    self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)

        # PCD database code must still be regenerated for the PCD drivers.
        if self.Name == "PcdPeim" or self.Name == "PcdDxe":
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.MakeCacheHit = True
            gDict[(self.MetaFile.Path, self.Arch)] = IR
        print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
        return True
2420\r
56c786b0
SS
2421 ## Show the first file name which causes cache miss\r
2422 def PrintFirstMakeCacheMissFile(self, gDict):\r
2423 if not GlobalData.gBinCacheSource:\r
2424 return\r
2425\r
94459080
SS
2426 # skip if the module cache already crashed\r
2427 if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:\r
2428 return\r
2429\r
56c786b0
SS
2430 # skip binary module\r
2431 if self.IsBinaryModule:\r
2432 return\r
2433\r
2434 if not (self.MetaFile.Path, self.Arch) in gDict:\r
2435 return\r
2436\r
2437 # Only print cache miss file for the MakeCache not hit module\r
2438 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
2439 return\r
2440\r
2441 if not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:\r
2442 EdkLogger.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
2443 return\r
2444\r
2445 # Find the cache dir name through the .ModuleHashPair file info\r
2446 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
2447\r
2448 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
2449 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
2450 if not os.path.exists(ModuleHashPair):\r
2451 EdkLogger.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
2452 return\r
2453\r
2454 try:\r
94459080
SS
2455 with open(ModuleHashPair, 'r') as f:\r
2456 ModuleHashPairList = json.load(f)\r
56c786b0
SS
2457 except:\r
2458 EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
2459 return\r
2460\r
2461 MakeHashSet = set()\r
2462 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
2463 TargetHashDir = path.join(FileDir, str(MakeHash))\r
2464 if os.path.exists(TargetHashDir):\r
2465 MakeHashSet.add(MakeHash)\r
2466 if not MakeHashSet:\r
2467 EdkLogger.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
2468 return\r
2469\r
2470 TargetHash = list(MakeHashSet)[0]\r
2471 TargetHashDir = path.join(FileDir, str(TargetHash))\r
2472 if len(MakeHashSet) > 1 :\r
2473 EdkLogger.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash, self.MetaFile.Path, self.Arch))\r
2474\r
2475 ListFile = path.join(TargetHashDir, self.Name + '.MakeHashChain')\r
2476 if os.path.exists(ListFile):\r
2477 try:\r
2478 f = open(ListFile, 'r')\r
2479 CachedList = json.load(f)\r
2480 f.close()\r
2481 except:\r
2482 EdkLogger.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile)\r
2483 return\r
2484 else:\r
2485 EdkLogger.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile)\r
2486 return\r
2487\r
2488 CurrentList = gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain\r
2489 for idx, (file, hash) in enumerate (CurrentList):\r
2490 (filecached, hashcached) = CachedList[idx]\r
2491 if file != filecached:\r
2492 EdkLogger.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self.MetaFile.Path, self.Arch, file, filecached))\r
2493 break\r
2494 if hash != hashcached:\r
2495 EdkLogger.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self.MetaFile.Path, self.Arch, file))\r
2496 break\r
2497\r
2498 return True\r
2499\r
e8449e1d 2500 ## Decide whether we can skip the ModuleAutoGen process\r
0e7e7a26 2501 def CanSkipbyCache(self, gDict):\r
e8449e1d 2502 # Hashing feature is off\r
0e7e7a26 2503 if not GlobalData.gBinCacheSource:\r
e8449e1d
FB
2504 return False\r
2505\r
0e7e7a26
SS
2506 if self in GlobalData.gBuildHashSkipTracking:\r
2507 return GlobalData.gBuildHashSkipTracking[self]\r
e8449e1d
FB
2508\r
2509 # If library or Module is binary do not skip by hash\r
2510 if self.IsBinaryModule:\r
0e7e7a26 2511 GlobalData.gBuildHashSkipTracking[self] = False\r
e8449e1d
FB
2512 return False\r
2513\r
2514 # .inc is contains binary information so do not skip by hash as well\r
2515 for f_ext in self.SourceFileList:\r
2516 if '.inc' in str(f_ext):\r
0e7e7a26 2517 GlobalData.gBuildHashSkipTracking[self] = False\r
e8449e1d
FB
2518 return False\r
2519\r
0e7e7a26
SS
2520 if not (self.MetaFile.Path, self.Arch) in gDict:\r
2521 return False\r
2522\r
2523 if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:\r
2524 GlobalData.gBuildHashSkipTracking[self] = True\r
e8449e1d
FB
2525 return True\r
2526\r
0e7e7a26
SS
2527 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
2528 GlobalData.gBuildHashSkipTracking[self] = True\r
2529 return True\r
e8449e1d 2530\r
0e7e7a26 2531 return False\r
e8449e1d
FB
2532\r
2533 ## Decide whether we can skip the ModuleAutoGen process\r
2534 # If any source file is newer than the module than we cannot skip\r
2535 #\r
2536 def CanSkip(self):\r
0e7e7a26
SS
2537 # Don't skip if cache feature enabled\r
2538 if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:\r
2539 return False\r
e8449e1d
FB
2540 if self.MakeFileDir in GlobalData.gSikpAutoGenCache:\r
2541 return True\r
2542 if not os.path.exists(self.TimeStampPath):\r
2543 return False\r
2544 #last creation time of the module\r
2545 DstTimeStamp = os.stat(self.TimeStampPath)[8]\r
2546\r
2547 SrcTimeStamp = self.Workspace._SrcTimeStamp\r
2548 if SrcTimeStamp > DstTimeStamp:\r
2549 return False\r
2550\r
2551 with open(self.TimeStampPath,'r') as f:\r
2552 for source in f:\r
2553 source = source.rstrip('\n')\r
2554 if not os.path.exists(source):\r
2555 return False\r
2556 if source not in ModuleAutoGen.TimeDict :\r
2557 ModuleAutoGen.TimeDict[source] = os.stat(source)[8]\r
2558 if ModuleAutoGen.TimeDict[source] > DstTimeStamp:\r
2559 return False\r
2560 GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)\r
2561 return True\r
2562\r
2563 @cached_property\r
2564 def TimeStampPath(self):\r
2565 return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')\r