1## @file\r
2# Create makefile for MS nmake and GNU make\r
3#\r
4# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>\r
5# SPDX-License-Identifier: BSD-2-Clause-Patent\r
6#\r
7from __future__ import absolute_import\r
8from AutoGen.AutoGen import AutoGen\r
9from Common.LongFilePathSupport import LongFilePath, CopyLongFilePath\r
10from Common.BuildToolError import *\r
11from Common.DataType import *\r
12from Common.Misc import *\r
13from Common.StringUtils import NormPath,GetSplitList\r
14from collections import defaultdict\r
15from Workspace.WorkspaceCommon import OrderedListDict\r
16import os.path as path\r
17import copy\r
18import hashlib\r
19from . import InfSectionParser\r
20from . import GenC\r
21from . import GenMake\r
22from . import GenDepex\r
23from io import BytesIO\r
24from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile\r
25from Workspace.MetaFileCommentParser import UsageList\r
26from .GenPcdDb import CreatePcdDatabaseCode\r
27from Common.caching import cached_class_function\r
28from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r
29import json\r
30import tempfile\r
31\r
32## Mapping Makefile type\r
33gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}\r
34#\r
35# Regular expression for finding Include Directories; the difference between MSFT and INTEL/GCC/RVCT\r
36# is that the former uses /I while the latter uses -I to specify include directories\r
37#\r
38gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)\r
39gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)\r
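# A minimal illustration (hypothetical FLAGS strings, not part of the build flow):\r
#   >>> gBuildOptIncludePatternMsft.findall("/nologo /Ic:/edk2/MdePkg/Include")\r
#   ['c:/edk2/MdePkg/Include']\r
#   >>> gBuildOptIncludePatternOther.findall("-O2 -I /opt/edk2/MdePkg/Include")\r
#   ['/opt/edk2/MdePkg/Include']\r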
40\r
41## default file name for AutoGen\r
42gAutoGenCodeFileName = "AutoGen.c"\r
43gAutoGenHeaderFileName = "AutoGen.h"\r
44gAutoGenStringFileName = "%(module_name)sStrDefs.h"\r
45gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"\r
46gAutoGenDepexFileName = "%(module_name)s.depex"\r
47gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"\r
48gAutoGenIdfFileName = "%(module_name)sIdf.hpk"\r
49gInfSpecVersion = "0x00010017"\r
50\r
51#\r
52# Match name = variable\r
53#\r
54gEfiVarStoreNamePattern = re.compile("\s*name\s*=\s*(\w+)")\r
55#\r
56# The format of the guid in an efivarstore statement looks like the following and must be correct:\r
57# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}\r
58#\r
59gEfiVarStoreGuidPattern = re.compile("\s*guid\s*=\s*({.*?{.*?}\s*})")\r
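# A minimal illustration (hypothetical statement, placeholder values):\r
#   >>> s = 'efivarstore MY_VAR, name = MyVar, guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}};'\r
#   >>> gEfiVarStoreNamePattern.search(s).group(1)\r
#   'MyVar'\r
#   >>> gEfiVarStoreGuidPattern.search(s).group(1)\r
#   '{0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}'\r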
60\r
61#\r
62# Template string used to generate the As Built INF\r
63#\r
64gAsBuiltInfHeaderString = TemplateString("""${header_comments}\r
65\r
66# DO NOT EDIT\r
67# FILE auto-generated\r
68\r
69[Defines]\r
70 INF_VERSION = ${module_inf_version}\r
71 BASE_NAME = ${module_name}\r
72 FILE_GUID = ${module_guid}\r
73 MODULE_TYPE = ${module_module_type}${BEGIN}\r
74 VERSION_STRING = ${module_version_string}${END}${BEGIN}\r
75 PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}\r
76 UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}\r
77 PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}\r
78 ENTRY_POINT = ${module_entry_point}${END}${BEGIN}\r
79 UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}\r
80 CONSTRUCTOR = ${module_constructor}${END}${BEGIN}\r
81 DESTRUCTOR = ${module_destructor}${END}${BEGIN}\r
82 SHADOW = ${module_shadow}${END}${BEGIN}\r
83 PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}\r
84 PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}\r
85 PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}\r
86 PCI_REVISION = ${module_pci_revision}${END}${BEGIN}\r
87 BUILD_NUMBER = ${module_build_number}${END}${BEGIN}\r
88 SPEC = ${module_spec}${END}${BEGIN}\r
89 UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}\r
90 MODULE_UNI_FILE = ${module_uni_file}${END}\r
91\r
92[Packages.${module_arch}]${BEGIN}\r
93 ${package_item}${END}\r
94\r
95[Binaries.${module_arch}]${BEGIN}\r
96 ${binary_item}${END}\r
97\r
98[PatchPcd.${module_arch}]${BEGIN}\r
99 ${patchablepcd_item}\r
100${END}\r
101\r
102[Protocols.${module_arch}]${BEGIN}\r
103 ${protocol_item}\r
104${END}\r
105\r
106[Ppis.${module_arch}]${BEGIN}\r
107 ${ppi_item}\r
108${END}\r
109\r
110[Guids.${module_arch}]${BEGIN}\r
111 ${guid_item}\r
112${END}\r
113\r
114[PcdEx.${module_arch}]${BEGIN}\r
115 ${pcd_item}\r
116${END}\r
117\r
118[LibraryClasses.${module_arch}]\r
119## @LIB_INSTANCES${BEGIN}\r
120# ${libraryclasses_item}${END}\r
121\r
122${depexsection_item}\r
123\r
124${userextension_tianocore_item}\r
125\r
126${tail_comments}\r
127\r
128[BuildOptions.${module_arch}]\r
129## @AsBuilt${BEGIN}\r
130## ${flags_item}${END}\r
131""")\r
132#\r
133# extend lists contained in a dictionary with lists stored in another dictionary\r
134# if CopyToDict is not derived from DefaultDict(list) then this may raise an exception\r
135#\r
136def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):\r
137 for Key in CopyFromDict:\r
138 CopyToDict[Key].extend(CopyFromDict[Key])\r
139\r
140# Create a directory specified by a set of path elements and return the full path\r
141def _MakeDir(PathList):\r
142 RetVal = path.join(*PathList)\r
143 CreateDirectory(RetVal)\r
144 return RetVal\r
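# A minimal illustration (hypothetical path elements, POSIX separators shown):\r
#   >>> _MakeDir(("Build", "OvmfX64", "DEBUG_GCC5"))   # creates the directory if it is missing\r
#   'Build/OvmfX64/DEBUG_GCC5'\r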
145\r
146#\r
147# Convert string to C format array\r
148#\r
149def _ConvertStringToByteArray(Value):\r
150 Value = Value.strip()\r
151 if not Value:\r
152 return None\r
153 if Value[0] == '{':\r
154 if not Value.endswith('}'):\r
155 return None\r
156 Value = Value.replace(' ', '').replace('{', '').replace('}', '')\r
157 ValFields = Value.split(',')\r
158 try:\r
159 for Index in range(len(ValFields)):\r
160 ValFields[Index] = str(int(ValFields[Index], 0))\r
161 except ValueError:\r
162 return None\r
163 Value = '{' + ','.join(ValFields) + '}'\r
164 return Value\r
165\r
166 Unicode = False\r
167 if Value.startswith('L"'):\r
168 if not Value.endswith('"'):\r
169 return None\r
170 Value = Value[1:]\r
171 Unicode = True\r
172 elif not Value.startswith('"') or not Value.endswith('"'):\r
173 return None\r
174\r
175 Value = eval(Value) # translate escape character\r
176 NewValue = '{'\r
177 for Index in range(0, len(Value)):\r
178 if Unicode:\r
179 NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','\r
180 else:\r
181 NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','\r
182 Value = NewValue + '0}'\r
183 return Value\r
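# A minimal illustration of the conversion above (hypothetical inputs):\r
#   >>> _ConvertStringToByteArray('"AB"')          # ASCII string -> byte values plus NUL\r
#   '{65,66,0}'\r
#   >>> _ConvertStringToByteArray('L"AB"')         # unicode string -> 16-bit code points plus NUL\r
#   '{65,66,0}'\r
#   >>> _ConvertStringToByteArray('{0x01, 0x02}')  # C array form -> decimal byte list\r
#   '{1,2}'\r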
184\r
185## ModuleAutoGen class\r
186#\r
187# This class encapsulates the AutoGen behaviors for the build tools. In addition to\r
188# the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according\r
189# to the [depex] section in module's inf file.\r
190#\r
191class ModuleAutoGen(AutoGen):\r
192 # call super().__init__ then call the worker function with different parameter count\r
193 def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
194 if not hasattr(self, "_Init"):\r
195 self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)\r
196 self._Init = True\r
197\r
198 ## Cache the timestamps of metafiles of every module in a class attribute\r
199 #\r
200 TimeDict = {}\r
201\r
202 def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r
203# check if this module is employed by active platform\r
204 if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):\r
205 EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \\r
206 % (MetaFile, Arch))\r
207 return None\r
208 return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)\r
209\r
210 ## Initialize ModuleAutoGen\r
211 #\r
212 # @param Workspace EdkIIWorkspaceBuild object\r
213 # @param ModuleFile The path of module file\r
214 # @param Target Build target (DEBUG, RELEASE)\r
215 # @param Toolchain Name of tool chain\r
216 # @param Arch The arch the module supports\r
217 # @param PlatformFile Platform meta-file\r
218 #\r
219 def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile,DataPipe):\r
220 EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))\r
221 GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)\r
222\r
223 self.Workspace = Workspace\r
224 self.WorkspaceDir = ""\r
225 self.PlatformInfo = None\r
226 self.DataPipe = DataPipe\r
227 self.__init_platform_info__()\r
228 self.MetaFile = ModuleFile\r
229 self.SourceDir = self.MetaFile.SubDir\r
230 self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)\r
231\r
232 self.ToolChain = Toolchain\r
233 self.BuildTarget = Target\r
234 self.Arch = Arch\r
235 self.ToolChainFamily = self.PlatformInfo.ToolChainFamily\r
236 self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily\r
237\r
238 self.IsCodeFileCreated = False\r
239 self.IsAsBuiltInfCreated = False\r
240 self.DepexGenerated = False\r
241\r
242 self.BuildDatabase = self.Workspace.BuildDatabase\r
243 self.BuildRuleOrder = None\r
244 self.BuildTime = 0\r
245\r
246 self._GuidComments = OrderedListDict()\r
247 self._ProtocolComments = OrderedListDict()\r
248 self._PpiComments = OrderedListDict()\r
249 self._BuildTargets = None\r
250 self._IntroBuildTargetList = None\r
251 self._FinalBuildTargetList = None\r
252 self._FileTypes = None\r
253\r
254 self.AutoGenDepSet = set()\r
255 self.ReferenceModules = []\r
256 self.ConstPcd = {}\r
257 self.FileDependCache = {}\r
258\r
259 def __init_platform_info__(self):\r
260 pinfo = self.DataPipe.Get("P_Info")\r
261 self.WorkspaceDir = pinfo.get("WorkspaceDir")\r
262 self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)\r
263 ## hash() operator of ModuleAutoGen\r
264 #\r
265 # The module file path and arch string will be used to represent\r
266 # hash value of this object\r
267 #\r
268 # @retval int Hash value of the module file path and arch\r
269 #\r
270 @cached_class_function\r
271 def __hash__(self):\r
272 return hash((self.MetaFile, self.Arch, self.ToolChain,self.BuildTarget))\r
273 def __repr__(self):\r
274 return "%s [%s]" % (self.MetaFile, self.Arch)\r
275\r
276 # Get FixedAtBuild Pcds of this Module\r
277 @cached_property\r
278 def FixedAtBuildPcds(self):\r
279 RetVal = []\r
280 for Pcd in self.ModulePcdList:\r
281 if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:\r
282 continue\r
283 if Pcd not in RetVal:\r
284 RetVal.append(Pcd)\r
285 return RetVal\r
286\r
287 @cached_property\r
288 def FixedVoidTypePcds(self):\r
289 RetVal = {}\r
290 for Pcd in self.FixedAtBuildPcds:\r
291 if Pcd.DatumType == TAB_VOID:\r
292 if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:\r
293 RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue\r
294 return RetVal\r
295\r
296 @property\r
297 def UniqueBaseName(self):\r
298 ModuleNames = self.DataPipe.Get("M_Name")\r
299 if not ModuleNames:\r
300 return self.Name\r
301 return ModuleNames.get((self.Name,self.MetaFile),self.Name)\r
302\r
303 # Macros could be used in build_rule.txt (also Makefile)\r
304 @cached_property\r
305 def Macros(self):\r
306 return OrderedDict((\r
307 ("WORKSPACE" ,self.WorkspaceDir),\r
308 ("MODULE_NAME" ,self.Name),\r
309 ("MODULE_NAME_GUID" ,self.UniqueBaseName),\r
310 ("MODULE_GUID" ,self.Guid),\r
311 ("MODULE_VERSION" ,self.Version),\r
312 ("MODULE_TYPE" ,self.ModuleType),\r
313 ("MODULE_FILE" ,str(self.MetaFile)),\r
314 ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),\r
315 ("MODULE_RELATIVE_DIR" ,self.SourceDir),\r
316 ("MODULE_DIR" ,self.SourceDir),\r
317 ("BASE_NAME" ,self.Name),\r
318 ("ARCH" ,self.Arch),\r
319 ("TOOLCHAIN" ,self.ToolChain),\r
320 ("TOOLCHAIN_TAG" ,self.ToolChain),\r
321 ("TOOL_CHAIN_TAG" ,self.ToolChain),\r
322 ("TARGET" ,self.BuildTarget),\r
323 ("BUILD_DIR" ,self.PlatformInfo.BuildDir),\r
324 ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),\r
325 ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),\r
326 ("MODULE_BUILD_DIR" ,self.BuildDir),\r
327 ("OUTPUT_DIR" ,self.OutputDir),\r
328 ("DEBUG_DIR" ,self.DebugDir),\r
329 ("DEST_DIR_OUTPUT" ,self.OutputDir),\r
330 ("DEST_DIR_DEBUG" ,self.DebugDir),\r
331 ("PLATFORM_NAME" ,self.PlatformInfo.Name),\r
332 ("PLATFORM_GUID" ,self.PlatformInfo.Guid),\r
333 ("PLATFORM_VERSION" ,self.PlatformInfo.Version),\r
334 ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),\r
335 ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),\r
336 ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),\r
337 ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)\r
338 ))\r
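# These macro values feed the substitutions in build_rule.txt and the generated Makefile;\r
# for example a rule output written as $(OUTPUT_DIR)(+)$(MODULE_NAME).efi would expand using\r
# the OUTPUT_DIR and MODULE_NAME entries above (illustrative only; the exact rule syntax is\r
# defined by build_rule.txt, not by this class).\r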
339\r
340 ## Return the module build data object\r
341 @cached_property\r
342 def Module(self):\r
343 return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
344\r
345 ## Return the module name\r
346 @cached_property\r
347 def Name(self):\r
348 return self.Module.BaseName\r
349\r
350 ## Return the module DxsFile if exist\r
351 @cached_property\r
352 def DxsFile(self):\r
353 return self.Module.DxsFile\r
354\r
355 ## Return the module meta-file GUID\r
356 @cached_property\r
357 def Guid(self):\r
358 #\r
359 # To build the same module more than once, the module path with FILE_GUID overridden has\r
360 # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path\r
361 # in the DSC. The overridden GUID can be retrieved from the file name\r
362 #\r
363 if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):\r
364 #\r
365 # Length of GUID is 36\r
366 #\r
367 return os.path.basename(self.MetaFile.Path)[:36]\r
368 return self.Module.Guid\r
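# For example, when FILE_GUID is overridden in the DSC, the metafile basename looks like\r
# "D6A2CB7F-02E3-4AAF-9103-A47E56F290F1module.inf" (hypothetical GUID), and the first 36\r
# characters above recover the overriding GUID.\r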
369\r
370 ## Return the module version\r
371 @cached_property\r
372 def Version(self):\r
373 return self.Module.Version\r
374\r
375 ## Return the module type\r
376 @cached_property\r
377 def ModuleType(self):\r
378 return self.Module.ModuleType\r
379\r
380 ## Return the component type (for Edk.x style of module)\r
381 @cached_property\r
382 def ComponentType(self):\r
383 return self.Module.ComponentType\r
384\r
385 ## Return the build type\r
386 @cached_property\r
387 def BuildType(self):\r
388 return self.Module.BuildType\r
389\r
390 ## Return the PCD_IS_DRIVER setting\r
391 @cached_property\r
392 def PcdIsDriver(self):\r
393 return self.Module.PcdIsDriver\r
394\r
395 ## Return the autogen version, i.e. module meta-file version\r
396 @cached_property\r
397 def AutoGenVersion(self):\r
398 return self.Module.AutoGenVersion\r
399\r
400 ## Check if the module is library or not\r
401 @cached_property\r
402 def IsLibrary(self):\r
403 return bool(self.Module.LibraryClass)\r
404\r
405 ## Check if the module is binary module or not\r
406 @cached_property\r
407 def IsBinaryModule(self):\r
408 return self.Module.IsBinaryModule\r
409\r
410 ## Return the directory to store intermediate files of the module\r
411 @cached_property\r
412 def BuildDir(self):\r
413 return _MakeDir((\r
414 self.PlatformInfo.BuildDir,\r
415 self.Arch,\r
416 self.SourceDir,\r
417 self.MetaFile.BaseName\r
418 ))\r
419\r
420 ## Return the directory to store the intermediate object files of the module\r
421 @cached_property\r
422 def OutputDir(self):\r
423 return _MakeDir((self.BuildDir, "OUTPUT"))\r
424\r
425 ## Return the directory path to store ffs file\r
426 @cached_property\r
427 def FfsOutputDir(self):\r
428 if GlobalData.gFdfParser:\r
429 return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
430 return ''\r
431\r
432 ## Return the directory to store auto-gened source files of the module\r
433 @cached_property\r
434 def DebugDir(self):\r
435 return _MakeDir((self.BuildDir, "DEBUG"))\r
436\r
437 ## Return the path of the custom makefile\r
438 @cached_property\r
439 def CustomMakefile(self):\r
440 RetVal = {}\r
441 for Type in self.Module.CustomMakefile:\r
442 MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'\r
443 File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r
444 RetVal[MakeType] = File\r
445 return RetVal\r
446\r
447 ## Return the directory of the makefile\r
448 #\r
449 # @retval string The directory string of module's makefile\r
450 #\r
451 @cached_property\r
452 def MakeFileDir(self):\r
453 return self.BuildDir\r
454\r
455 ## Return build command string\r
456 #\r
457 # @retval string Build command string\r
458 #\r
459 @cached_property\r
460 def BuildCommand(self):\r
461 return self.PlatformInfo.BuildCommand\r
462\r
463 ## Get Module package and Platform package\r
464 #\r
465 # @retval list The list of package object\r
466 #\r
467 @cached_property\r
468 def PackageList(self):\r
469 PkagList = []\r
470 if self.Module.Packages:\r
471 PkagList.extend(self.Module.Packages)\r
472 Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
473 for Package in Platform.Packages:\r
474 if Package in PkagList:\r
475 continue\r
476 PkagList.append(Package)\r
477 return PkagList\r
478\r
479 ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on\r
480 #\r
481 # @retval list The list of package object\r
482 #\r
483 @cached_property\r
484 def DerivedPackageList(self):\r
485 PackageList = []\r
486 PackageList.extend(self.PackageList)\r
487 for M in self.DependentLibraryList:\r
488 for Package in M.Packages:\r
489 if Package in PackageList:\r
490 continue\r
491 PackageList.append(Package)\r
492 return PackageList\r
493\r
494 ## Get the depex string\r
495 #\r
496 # @return : a string containing all depex expressions.\r
497 def _GetDepexExpresionString(self):\r
498 DepexStr = ''\r
499 DepexList = []\r
500 ## DPX_SOURCE IN Define section.\r
501 if self.Module.DxsFile:\r
502 return DepexStr\r
503 for M in [self.Module] + self.DependentLibraryList:\r
504 Filename = M.MetaFile.Path\r
505 InfObj = InfSectionParser.InfSectionParser(Filename)\r
506 DepexExpressionList = InfObj.GetDepexExpresionList()\r
507 for DepexExpression in DepexExpressionList:\r
508 for key in DepexExpression:\r
509 Arch, ModuleType = key\r
510 DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]\r
511 # the type of build module is USER_DEFINED.\r
512 # All different DEPEX section tags would be copied into the As Built INF file\r
513 # and there would be separate DEPEX section tags\r
514 if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:\r
515 if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):\r
516 DepexList.append({(Arch, ModuleType): DepexExpr})\r
517 else:\r
518 if Arch.upper() == TAB_ARCH_COMMON or \\r
519 (Arch.upper() == self.Arch.upper() and \\r
520 ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):\r
521 DepexList.append({(Arch, ModuleType): DepexExpr})\r
522\r
523 #the type of build module is USER_DEFINED.\r
524 if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:\r
525 for Depex in DepexList:\r
526 for key in Depex:\r
527 DepexStr += '[Depex.%s.%s]\n' % key\r
528 DepexStr += '\n'.join('# '+ val for val in Depex[key])\r
529 DepexStr += '\n\n'\r
530 if not DepexStr:\r
531 return '[Depex.%s]\n' % self.Arch\r
532 return DepexStr\r
533\r
534 # the type of build module is not USER_DEFINED.\r
535 Count = 0\r
536 for Depex in DepexList:\r
537 Count += 1\r
538 if DepexStr != '':\r
539 DepexStr += ' AND '\r
540 DepexStr += '('\r
541 for D in Depex.values():\r
542 DepexStr += ' '.join(val for val in D)\r
543 Index = DepexStr.find('END')\r
544 if Index > -1 and Index == len(DepexStr) - 3:\r
545 DepexStr = DepexStr[:-3]\r
546 DepexStr = DepexStr.strip()\r
547 DepexStr += ')'\r
548 if Count == 1:\r
549 DepexStr = DepexStr.lstrip('(').rstrip(')').strip()\r
550 if not DepexStr:\r
551 return '[Depex.%s]\n' % self.Arch\r
552 return '[Depex.%s]\n# ' % self.Arch + DepexStr\r
553\r
554 ## Merge dependency expression\r
555 #\r
556 # @retval list The token list of the dependency expression after parsed\r
557 #\r
558 @cached_property\r
559 def DepexList(self):\r
560 if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:\r
561 return {}\r
562\r
563 DepexList = []\r
564 #\r
565 # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression\r
566 #\r
567 FixedVoidTypePcds = {}\r
568 for M in [self] + self.LibraryAutoGenList:\r
569 FixedVoidTypePcds.update(M.FixedVoidTypePcds)\r
570 for M in [self] + self.LibraryAutoGenList:\r
571 Inherited = False\r
572 for D in M.Module.Depex[self.Arch, self.ModuleType]:\r
573 if DepexList != []:\r
574 DepexList.append('AND')\r
575 DepexList.append('(')\r
576 #replace D with value if D is FixedAtBuild PCD\r
577 NewList = []\r
578 for item in D:\r
579 if '.' not in item:\r
580 NewList.append(item)\r
581 else:\r
582 try:\r
583 Value = FixedVoidTypePcds[item]\r
584 if len(Value.split(',')) != 16:\r
585 EdkLogger.error("build", FORMAT_INVALID,\r
586 "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))\r
587 NewList.append(Value)\r
588 except:\r
589 EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))\r
590\r
591 DepexList.extend(NewList)\r
592 if DepexList[-1] == 'END': # no need of a END at this time\r
593 DepexList.pop()\r
594 DepexList.append(')')\r
595 Inherited = True\r
596 if Inherited:\r
597 EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))\r
598 if 'BEFORE' in DepexList or 'AFTER' in DepexList:\r
599 break\r
600 if len(DepexList) > 0:\r
601 EdkLogger.verbose('')\r
602 return {self.ModuleType:DepexList}\r
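# For example (hypothetical PCD), a [Depex] token written as gMyTokenSpaceGuid.PcdMyDepexGuid\r
# is replaced above by its FixedAtBuild VOID* value, which must be a 16-byte, comma-separated\r
# GUID byte array such as {0x12, 0x34, ...}; any other form triggers the FORMAT_INVALID error.\r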
603\r
604 ## Merge dependency expression\r
605 #\r
606 # @retval list The token list of the dependency expression after parsed\r
607 #\r
608 @cached_property\r
609 def DepexExpressionDict(self):\r
610 if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:\r
611 return {}\r
612\r
613 DepexExpressionString = ''\r
614 #\r
615 # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression\r
616 #\r
617 for M in [self.Module] + self.DependentLibraryList:\r
618 Inherited = False\r
619 for D in M.DepexExpression[self.Arch, self.ModuleType]:\r
620 if DepexExpressionString != '':\r
621 DepexExpressionString += ' AND '\r
622 DepexExpressionString += '('\r
623 DepexExpressionString += D\r
624 DepexExpressionString = DepexExpressionString.rstrip('END').strip()\r
625 DepexExpressionString += ')'\r
626 Inherited = True\r
627 if Inherited:\r
628 EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))\r
629 if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:\r
630 break\r
631 if len(DepexExpressionString) > 0:\r
632 EdkLogger.verbose('')\r
633\r
634 return {self.ModuleType:DepexExpressionString}\r
635\r
636 # Get the TianoCore user extensions from the module and its dependent libraries.\r
637 # @retval: a list containing the TianoCore user extensions.\r
638 #\r
639 def _GetTianoCoreUserExtensionList(self):\r
640 TianoCoreUserExtentionList = []\r
641 for M in [self.Module] + self.DependentLibraryList:\r
642 Filename = M.MetaFile.Path\r
643 InfObj = InfSectionParser.InfSectionParser(Filename)\r
644 TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()\r
645 for TianoCoreUserExtent in TianoCoreUserExtenList:\r
646 for Section in TianoCoreUserExtent:\r
647 ItemList = Section.split(TAB_SPLIT)\r
648 Arch = self.Arch\r
649 if len(ItemList) == 4:\r
650 Arch = ItemList[3]\r
651 if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():\r
652 TianoCoreList = []\r
653 TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])\r
654 TianoCoreList.extend(TianoCoreUserExtent[Section][:])\r
655 TianoCoreList.append('\n')\r
656 TianoCoreUserExtentionList.append(TianoCoreList)\r
657\r
658 return TianoCoreUserExtentionList\r
659\r
660 ## Return the list of specification version required for the module\r
661 #\r
662 # @retval list The list of specification defined in module file\r
663 #\r
664 @cached_property\r
665 def Specification(self):\r
666 return self.Module.Specification\r
667\r
668 ## Tool option for the module build\r
669 #\r
670 # @param PlatformInfo The object of PlatformBuildInfo\r
671 # @retval dict The dict containing valid options\r
672 #\r
673 @cached_property\r
674 def BuildOption(self):\r
675 RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)\r
676 if self.BuildRuleOrder:\r
677 self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]\r
678 return RetVal\r
679\r
680 ## Get include path list from tool option for the module build\r
681 #\r
682 # @retval list The include path list\r
683 #\r
684 @cached_property\r
685 def BuildOptionIncPathList(self):\r
686 #\r
687 # Regular expression for finding Include Directories; the difference between MSFT and INTEL/GCC/RVCT\r
688 # is that the former uses /I while the latter uses -I to specify include directories\r
689 #\r
690 if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):\r
691 BuildOptIncludeRegEx = gBuildOptIncludePatternMsft\r
692 elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):\r
693 BuildOptIncludeRegEx = gBuildOptIncludePatternOther\r
694 else:\r
695 #\r
696 # New ToolChainFamily, don't know whether there is an option to specify include directories\r
697 #\r
698 return []\r
699\r
700 RetVal = []\r
701 for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):\r
702 try:\r
703 FlagOption = self.BuildOption[Tool]['FLAGS']\r
704 except KeyError:\r
705 FlagOption = ''\r
706\r
707 if self.ToolChainFamily != 'RVCT':\r
708 IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]\r
709 else:\r
710 #\r
711 # RVCT may specify a list of directories separated by commas\r
712 #\r
713 IncPathList = []\r
714 for Path in BuildOptIncludeRegEx.findall(FlagOption):\r
715 PathList = GetSplitList(Path, TAB_COMMA_SPLIT)\r
716 IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)\r
717\r
718 #\r
719 # EDK II modules must not reference header files outside of the packages they depend on or\r
720 # outside the module's own directory tree. Report an error on violation.\r
721 #\r
722 if GlobalData.gDisableIncludePathCheck == False:\r
723 for Path in IncPathList:\r
724 if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):\r
725 ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)\r
726 EdkLogger.error("build",\r
727 PARAMETER_INVALID,\r
728 ExtraData=ErrMsg,\r
729 File=str(self.MetaFile))\r
730 RetVal += IncPathList\r
731 return RetVal\r
732\r
733 ## Return a list of files which can be built from source\r
734 #\r
735 # What kind of files can be built is determined by build rules in\r
736 # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.\r
737 #\r
738 @cached_property\r
739 def SourceFileList(self):\r
740 RetVal = []\r
741 ToolChainTagSet = {"", TAB_STAR, self.ToolChain}\r
742 ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}\r
743 for F in self.Module.Sources:\r
744 # match tool chain\r
745 if F.TagName not in ToolChainTagSet:\r
746 EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "\r
747 "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))\r
748 continue\r
749 # match tool chain family or build rule family\r
750 if F.ToolChainFamily not in ToolChainFamilySet:\r
751 EdkLogger.debug(\r
752 EdkLogger.DEBUG_0,\r
753 "The file [%s] must be built by tools of [%s], " \\r
754 "but current toolchain family is [%s], buildrule family is [%s]" \\r
755 % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))\r
756 continue\r
757\r
758 # add the file path into search path list for file including\r
759 if F.Dir not in self.IncludePathList:\r
760 self.IncludePathList.insert(0, F.Dir)\r
761 RetVal.append(F)\r
762\r
763 self._MatchBuildRuleOrder(RetVal)\r
764\r
765 for F in RetVal:\r
766 self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)\r
767 return RetVal\r
768\r
769 def _MatchBuildRuleOrder(self, FileList):\r
770 Order_Dict = {}\r
771 self.BuildOption\r
772 for SingleFile in FileList:\r
773 if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:\r
774 key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]\r
775 if key in Order_Dict:\r
776 Order_Dict[key].append(SingleFile.Ext)\r
777 else:\r
778 Order_Dict[key] = [SingleFile.Ext]\r
779\r
780 RemoveList = []\r
781 for F in Order_Dict:\r
782 if len(Order_Dict[F]) > 1:\r
783 Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))\r
784 for Ext in Order_Dict[F][1:]:\r
785 RemoveList.append(F + Ext)\r
786\r
787 for item in RemoveList:\r
788 FileList.remove(item)\r
789\r
790 return FileList\r
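# A minimal illustration (hypothetical sources): with a BuildRuleOrder of ['.nasm', '.asm'],\r
# a module listing both CpuAsm.nasm and CpuAsm.asm keeps only CpuAsm.nasm here; the lower\r
# priority sibling is collected in RemoveList and dropped from FileList.\r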
791\r
792 ## Return the list of unicode files\r
793 @cached_property\r
794 def UnicodeFileList(self):\r
795 return self.FileTypes.get(TAB_UNICODE_FILE,[])\r
796\r
797 ## Return the list of vfr files\r
798 @cached_property\r
799 def VfrFileList(self):\r
800 return self.FileTypes.get(TAB_VFR_FILE, [])\r
801\r
802 ## Return the list of Image Definition files\r
803 @cached_property\r
804 def IdfFileList(self):\r
805 return self.FileTypes.get(TAB_IMAGE_FILE,[])\r
806\r
807 ## Return a list of files which can be built from binary\r
808 #\r
809 # "Build" binary files are just to copy them to build directory.\r
810 #\r
811 # @retval list The list of files which can be built later\r
812 #\r
813 @cached_property\r
814 def BinaryFileList(self):\r
815 RetVal = []\r
816 for F in self.Module.Binaries:\r
817 if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:\r
818 continue\r
819 RetVal.append(F)\r
820 self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)\r
821 return RetVal\r
822\r
823 @cached_property\r
824 def BuildRules(self):\r
825 RetVal = {}\r
826 BuildRuleDatabase = self.PlatformInfo.BuildRule\r
827 for Type in BuildRuleDatabase.FileTypeList:\r
828 #first try getting build rule by BuildRuleFamily\r
829 RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]\r
830 if not RuleObject:\r
831 # build type is always module type, but ...\r
832 if self.ModuleType != self.BuildType:\r
833 RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]\r
834 #second try getting build rule by ToolChainFamily\r
835 if not RuleObject:\r
836 RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]\r
837 if not RuleObject:\r
838 # build type is always module type, but ...\r
839 if self.ModuleType != self.BuildType:\r
840 RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]\r
841 if not RuleObject:\r
842 continue\r
843 RuleObject = RuleObject.Instantiate(self.Macros)\r
844 RetVal[Type] = RuleObject\r
845 for Ext in RuleObject.SourceFileExtList:\r
846 RetVal[Ext] = RuleObject\r
847 return RetVal\r
848\r
849 def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):\r
850 if self._BuildTargets is None:\r
851 self._IntroBuildTargetList = set()\r
852 self._FinalBuildTargetList = set()\r
853 self._BuildTargets = defaultdict(set)\r
854 self._FileTypes = defaultdict(set)\r
855\r
856 if not BinaryFileList:\r
857 BinaryFileList = self.BinaryFileList\r
858\r
859 SubDirectory = os.path.join(self.OutputDir, File.SubDir)\r
860 if not os.path.exists(SubDirectory):\r
861 CreateDirectory(SubDirectory)\r
862 TargetList = set()\r
863 FinalTargetName = set()\r
864 RuleChain = set()\r
865 SourceList = [File]\r
866 Index = 0\r
867 #\r
868 # Make sure to get build rule order value\r
869 #\r
870 self.BuildOption\r
871\r
872 while Index < len(SourceList):\r
873 # Reset the FileType if not the first iteration.\r
874 if Index > 0:\r
875 FileType = TAB_UNKNOWN_FILE\r
876 Source = SourceList[Index]\r
877 Index = Index + 1\r
878\r
879 if Source != File:\r
880 CreateDirectory(Source.Dir)\r
881\r
882 if File.IsBinary and File == Source and File in BinaryFileList:\r
883 # Skip all files that are not binary libraries\r
884 if not self.IsLibrary:\r
885 continue\r
886 RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]\r
887 elif FileType in self.BuildRules:\r
888 RuleObject = self.BuildRules[FileType]\r
889 elif Source.Ext in self.BuildRules:\r
890 RuleObject = self.BuildRules[Source.Ext]\r
891 else:\r
892 # No more rule to apply: Source is a final target.\r
893 FinalTargetName.add(Source)\r
894 continue\r
895\r
896 FileType = RuleObject.SourceFileType\r
897 self._FileTypes[FileType].add(Source)\r
898\r
899 # stop at STATIC_LIBRARY for library\r
900 if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:\r
901 FinalTargetName.add(Source)\r
902 continue\r
903\r
904 Target = RuleObject.Apply(Source, self.BuildRuleOrder)\r
905 if not Target:\r
906 # No Target: Source is a final target.\r
907 FinalTargetName.add(Source)\r
908 continue\r
909\r
910 TargetList.add(Target)\r
911 self._BuildTargets[FileType].add(Target)\r
912\r
913 if not Source.IsBinary and Source == File:\r
914 self._IntroBuildTargetList.add(Target)\r
915\r
916 # to avoid cyclic rule\r
917 if FileType in RuleChain:\r
918 EdkLogger.error("build", ERROR_STATEMENT, "Cyclic dependency detected while generating rule for %s" % str(Source))\r
919\r
920 RuleChain.add(FileType)\r
921 SourceList.extend(Target.Outputs)\r
922\r
923 # For each final target name, retrieve the corresponding TargetDescBlock instance.\r
924 for FTargetName in FinalTargetName:\r
925 for Target in TargetList:\r
926 if FTargetName == Target.Target:\r
927 self._FinalBuildTargetList.add(Target)\r
928\r
929 @cached_property\r
930 def Targets(self):\r
931 if self._BuildTargets is None:\r
932 self._IntroBuildTargetList = set()\r
933 self._FinalBuildTargetList = set()\r
934 self._BuildTargets = defaultdict(set)\r
935 self._FileTypes = defaultdict(set)\r
936\r
937 #TRICK: call SourceFileList property to apply build rule for source files\r
938 self.SourceFileList\r
939\r
940 #TRICK: call _GetBinaryFileList to apply build rule for binary files\r
941 self.BinaryFileList\r
942\r
943 return self._BuildTargets\r
944\r
945 @cached_property\r
946 def IntroTargetList(self):\r
947 self.Targets\r
948 return self._IntroBuildTargetList\r
949\r
950 @cached_property\r
951 def CodaTargetList(self):\r
952 self.Targets\r
953 return self._FinalBuildTargetList\r
954\r
955 @cached_property\r
956 def FileTypes(self):\r
957 self.Targets\r
958 return self._FileTypes\r
959\r
960 ## Get the list of package object the module depends on and the Platform depends on\r
961 #\r
962 # @retval list The package object list\r
963 #\r
964 @cached_property\r
965 def DependentPackageList(self):\r
966 return self.PackageList\r
967\r
968 ## Return the list of auto-generated code file\r
969 #\r
970 # @retval list The list of auto-generated file\r
971 #\r
972 @cached_property\r
973 def AutoGenFileList(self):\r
974 AutoGenUniIdf = self.BuildType != 'UEFI_HII'\r
975 UniStringBinBuffer = BytesIO()\r
976 IdfGenBinBuffer = BytesIO()\r
977 RetVal = {}\r
978 AutoGenC = TemplateString()\r
979 AutoGenH = TemplateString()\r
980 StringH = TemplateString()\r
981 StringIdf = TemplateString()\r
982 GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)\r
983 #\r
984 # AutoGen.c is generated if there are library classes in inf, or there are object files\r
985 #\r
986 if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0\r
987 or TAB_OBJECT_FILE in self.FileTypes):\r
988 AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)\r
989 RetVal[AutoFile] = str(AutoGenC)\r
990 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
991 if str(AutoGenH) != "":\r
992 AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)\r
993 RetVal[AutoFile] = str(AutoGenH)\r
994 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
995 if str(StringH) != "":\r
996 AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)\r
997 RetVal[AutoFile] = str(StringH)\r
998 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
999 if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":\r
1000 AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)\r
1001 RetVal[AutoFile] = UniStringBinBuffer.getvalue()\r
1002 AutoFile.IsBinary = True\r
1003 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
1004 if UniStringBinBuffer is not None:\r
1005 UniStringBinBuffer.close()\r
1006 if str(StringIdf) != "":\r
1007 AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)\r
1008 RetVal[AutoFile] = str(StringIdf)\r
1009 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
1010 if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":\r
1011 AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)\r
1012 RetVal[AutoFile] = IdfGenBinBuffer.getvalue()\r
1013 AutoFile.IsBinary = True\r
1014 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
1015 if IdfGenBinBuffer is not None:\r
1016 IdfGenBinBuffer.close()\r
1017 return RetVal\r
1018\r
1019 ## Return the list of library modules explicitly or implicitly used by this module\r
1020 @cached_property\r
1021 def DependentLibraryList(self):\r
1022 # only merge library classes and PCD for non-library module\r
1023 if self.IsLibrary:\r
1024 return []\r
1025 return self.PlatformInfo.ApplyLibraryInstance(self.Module)\r
1026\r
1027 ## Get the list of PCDs from current module\r
1028 #\r
1029 # @retval list The list of PCD\r
1030 #\r
1031 @cached_property\r
1032 def ModulePcdList(self):\r
1033 # apply PCD settings from platform\r
1034 RetVal = self.PlatformInfo.ApplyPcdSetting(self, self.Module.Pcds)\r
1035\r
1036 return RetVal\r
1037 @cached_property\r
1038 def _PcdComments(self):\r
1039 ReVal = OrderedListDict()\r
1040 ExtendCopyDictionaryLists(ReVal, self.Module.PcdComments)\r
1041 if not self.IsLibrary:\r
1042 for Library in self.DependentLibraryList:\r
1043 ExtendCopyDictionaryLists(ReVal, Library.PcdComments)\r
1044 return ReVal\r
1045\r
1046 ## Get the list of PCDs from dependent libraries\r
1047 #\r
1048 # @retval list The list of PCD\r
1049 #\r
1050 @cached_property\r
1051 def LibraryPcdList(self):\r
1052 if self.IsLibrary:\r
1053 return []\r
1054 RetVal = []\r
1055 Pcds = set()\r
1056 # get PCDs from dependent libraries\r
1057 for Library in self.DependentLibraryList:\r
1058 PcdsInLibrary = OrderedDict()\r
1059 for Key in Library.Pcds:\r
1060 # skip duplicated PCDs\r
1061 if Key in self.Module.Pcds or Key in Pcds:\r
1062 continue\r
1063 Pcds.add(Key)\r
1064 PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])\r
1065 RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self, PcdsInLibrary, Library=Library))\r
1066 return RetVal\r
1067\r
1068 ## Get the GUID value mapping\r
1069 #\r
1070 # @retval dict The mapping between GUID cname and its value\r
1071 #\r
1072 @cached_property\r
1073 def GuidList(self):\r
1074 RetVal = self.Module.Guids\r
1075 for Library in self.DependentLibraryList:\r
1076 RetVal.update(Library.Guids)\r
1077 ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)\r
1078 ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)\r
1079 return RetVal\r
1080\r
1081 @cached_property\r
1082 def GetGuidsUsedByPcd(self):\r
1083 RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())\r
1084 for Library in self.DependentLibraryList:\r
1085 RetVal.update(Library.GetGuidsUsedByPcd())\r
1086 return RetVal\r
1087 ## Get the protocol value mapping\r
1088 #\r
1089 # @retval dict The mapping between protocol cname and its value\r
1090 #\r
1091 @cached_property\r
1092 def ProtocolList(self):\r
1093 RetVal = OrderedDict(self.Module.Protocols)\r
1094 for Library in self.DependentLibraryList:\r
1095 RetVal.update(Library.Protocols)\r
1096 ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)\r
1097 ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)\r
1098 return RetVal\r
1099\r
1100 ## Get the PPI value mapping\r
1101 #\r
1102 # @retval dict The mapping between PPI cname and its value\r
1103 #\r
1104 @cached_property\r
1105 def PpiList(self):\r
1106 RetVal = OrderedDict(self.Module.Ppis)\r
1107 for Library in self.DependentLibraryList:\r
1108 RetVal.update(Library.Ppis)\r
1109 ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)\r
1110 ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)\r
1111 return RetVal\r
1112\r
1113 ## Get the list of include search path\r
1114 #\r
1115 # @retval list The list path\r
1116 #\r
1117 @cached_property\r
1118 def IncludePathList(self):\r
1119 RetVal = []\r
1120 RetVal.append(self.MetaFile.Dir)\r
1121 RetVal.append(self.DebugDir)\r
1122\r
1123 for Package in self.PackageList:\r
1124 PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
1125 if PackageDir not in RetVal:\r
1126 RetVal.append(PackageDir)\r
1127 IncludesList = Package.Includes\r
1128 if Package._PrivateIncludes:\r
1129 if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):\r
1130 IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))\r
1131 for Inc in IncludesList:\r
1132 if Inc not in RetVal:\r
1133 RetVal.append(str(Inc))\r
1134 RetVal.extend(self.IncPathFromBuildOptions)\r
1135 return RetVal\r
1136\r
1137 @cached_property\r
1138 def IncPathFromBuildOptions(self):\r
1139 IncPathList = []\r
1140 for tool in self.BuildOption:\r
1141 if 'FLAGS' in self.BuildOption[tool]:\r
1142 flags = self.BuildOption[tool]['FLAGS']\r
1143 whitespace = False\r
1144 for flag in flags.split(" "):\r
1145 flag = flag.strip()\r
1146 if flag.startswith(("/I","-I")):\r
1147 if len(flag)>2:\r
1148 if os.path.exists(flag[2:]):\r
1149 IncPathList.append(flag[2:])\r
1150 else:\r
1151 whitespace = True\r
1152 continue\r
1153 if whitespace and flag:\r
1154 if os.path.exists(flag):\r
1155 IncPathList.append(flag)\r
1156 whitespace = False\r
1157 return IncPathList\r
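# A minimal illustration (hypothetical FLAGS value): "-DFOO -I /opt/inc -IC:/other" yields\r
# ['/opt/inc', 'C:/other'] from the loop above, provided both directories exist on disk; a\r
# bare "-I" only sets the whitespace flag so that the next token is taken as the path.\r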
1158\r
1159 @cached_property\r
1160 def IncludePathLength(self):\r
1161 return sum(len(inc)+1 for inc in self.IncludePathList)\r
1162\r
1163 ## Get the list of include paths from the packages\r
1164 #\r
1165 # @IncludesList list The list path\r
1166 #\r
1167 @cached_property\r
1168 def PackageIncludePathList(self):\r
1169 IncludesList = []\r
1170 for Package in self.PackageList:\r
1171 PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
1172 IncludesList = Package.Includes\r
1173 if Package._PrivateIncludes:\r
1174 if not self.MetaFile.Path.startswith(PackageDir):\r
1175 IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))\r
1176 return IncludesList\r
1177\r
1178 ## Get HII EX PCDs which maybe used by VFR\r
1179 #\r
1180 # efivarstore used by VFR may relate to HII EX PCDs\r
1181 # Get the variable name and GUID from efivarstore and HII EX PCD\r
1182 # List the HII EX PCDs in As Built INF if both name and GUID match.\r
1183 #\r
1184 # @retval list HII EX PCDs\r
1185 #\r
1186 def _GetPcdsMaybeUsedByVfr(self):\r
1187 if not self.SourceFileList:\r
1188 return []\r
1189\r
1190 NameGuids = set()\r
1191 for SrcFile in self.SourceFileList:\r
1192 if SrcFile.Ext.lower() != '.vfr':\r
1193 continue\r
1194 Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')\r
1195 if not os.path.exists(Vfri):\r
1196 continue\r
1197 VfriFile = open(Vfri, 'r')\r
1198 Content = VfriFile.read()\r
1199 VfriFile.close()\r
1200 Pos = Content.find('efivarstore')\r
1201 while Pos != -1:\r
1202 #\r
1203 # Make sure 'efivarstore' is the start of efivarstore statement\r
1204 # in case the value of 'name' (name = efivarstore) is equal to 'efivarstore'\r
1205 #\r
1206 Index = Pos - 1\r
1207 while Index >= 0 and Content[Index] in ' \t\r\n':\r
1208 Index -= 1\r
1209 if Index >= 0 and Content[Index] != ';':\r
1210 Pos = Content.find('efivarstore', Pos + len('efivarstore'))\r
1211 continue\r
1212 #\r
1213 # 'efivarstore' must be followed by name and guid\r
1214 #\r
1215 Name = gEfiVarStoreNamePattern.search(Content, Pos)\r
1216 if not Name:\r
1217 break\r
1218 Guid = gEfiVarStoreGuidPattern.search(Content, Pos)\r
1219 if not Guid:\r
1220 break\r
1221 NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')\r
1222 NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))\r
1223 Pos = Content.find('efivarstore', Name.end())\r
1224 if not NameGuids:\r
1225 return []\r
1226 HiiExPcds = []\r
1227 for Pcd in self.PlatformInfo.Pcds.values():\r
1228 if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:\r
1229 continue\r
1230 for SkuInfo in Pcd.SkuInfoList.values():\r
1231 Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)\r
1232 if not Value:\r
1233 continue\r
1234 Name = _ConvertStringToByteArray(SkuInfo.VariableName)\r
1235 Guid = GuidStructureStringToGuidString(Value)\r
1236 if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:\r
1237 HiiExPcds.append(Pcd)\r
1238 break\r
1239\r
1240 return HiiExPcds\r
1241\r
1242 def _GenOffsetBin(self):\r
1243 VfrUniBaseName = {}\r
1244 for SourceFile in self.Module.Sources:\r
1245 if SourceFile.Type.upper() == ".VFR" :\r
1246 #\r
1247 # search the .map file to find the offset of vfr binary in the PE32+/TE file.\r
1248 #\r
1249 VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")\r
1250 elif SourceFile.Type.upper() == ".UNI" :\r
1251 #\r
1252 # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.\r
1253 #\r
1254 VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")\r
1255\r
1256 if not VfrUniBaseName:\r
1257 return None\r
1258 MapFileName = os.path.join(self.OutputDir, self.Name + ".map")\r
1259 EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")\r
1260 VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))\r
1261 if not VfrUniOffsetList:\r
1262 return None\r
1263\r
1264 OutputName = '%sOffset.bin' % self.Name\r
1265 UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)\r
1266\r
1267 try:\r
1268 fInputfile = open(UniVfrOffsetFileName, "wb+", 0)\r
1269 except:\r
1270 EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)\r
1271\r
1272 # Use an instance of BytesIO to cache data\r
1273 fStringIO = BytesIO()\r
1274\r
1275 for Item in VfrUniOffsetList:\r
1276 if (Item[0].find("Strings") != -1):\r
1277 #\r
1278 # UNI offset in image.\r
1279 # GUID + Offset\r
1280 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r
1281 #\r
1282 UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'\r
1283 fStringIO.write(UniGuid)\r
1284 UniValue = pack ('Q', int (Item[1], 16))\r
1285 fStringIO.write (UniValue)\r
1286 else:\r
1287 #\r
1288 # VFR binary offset in image.\r
1289 # GUID + Offset\r
1290 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r
1291 #\r
1292 VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'\r
1293 fStringIO.write(VfrGuid)\r
1294 VfrValue = pack ('Q', int (Item[1], 16))\r
1295 fStringIO.write (VfrValue)\r
1296 #\r
1297 # write data into file.\r
1298 #\r
1299 try :\r
1300 fInputfile.write (fStringIO.getvalue())\r
1301 except:\r
1302 EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "\r
1303 "file been locked or using by other applications." %UniVfrOffsetFileName, None)\r
1304\r
1305 fStringIO.close ()\r
1306 fInputfile.close ()\r
1307 return OutputName\r
1308\r
1309 @cached_property\r
1310 def OutputFile(self):\r
1311 retVal = set()\r
1312\r
1313 for Root, Dirs, Files in os.walk(self.BuildDir):\r
1314 for File in Files:\r
1315 # lib file is already added through above CodaTargetList, skip it here\r
1316 if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):\r
1317 NewFile = path.join(Root, File)\r
1318 retVal.add(NewFile)\r
1319\r
1320 for Root, Dirs, Files in os.walk(self.FfsOutputDir):\r
1321 for File in Files:\r
1322 NewFile = path.join(Root, File)\r
1323 retVal.add(NewFile)\r
1324\r
1325 return retVal\r
1326\r
1327 ## Create the As Built INF file for the module\r
1328 #\r
1329 def CreateAsBuiltInf(self):\r
1330\r
1331 if self.IsAsBuiltInfCreated:\r
1332 return\r
1333\r
1334 # Skip INF file generation for libraries\r
1335 if self.IsLibrary:\r
1336 return\r
1337\r
1338 # Skip the following code for modules with no source files\r
1339 if not self.SourceFileList:\r
1340 return\r
1341\r
1342 # Skip the following code for modules without any binary files\r
1343 if self.BinaryFileList:\r
1344 return\r
1345\r
1346 ### TODO: How to handle mixed source and binary modules\r
1347\r
1348 # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries\r
1349 # Also find all packages that the DynamicEx PCDs depend on\r
1350 Pcds = []\r
1351 PatchablePcds = []\r
1352 Packages = []\r
1353 PcdCheckList = []\r
1354 PcdTokenSpaceList = []\r
1355 for Pcd in self.ModulePcdList + self.LibraryPcdList:\r
1356 if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:\r
1357 PatchablePcds.append(Pcd)\r
1358 PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))\r
1359 elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:\r
1360 if Pcd not in Pcds:\r
1361 Pcds.append(Pcd)\r
1362 PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))\r
1363 PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))\r
1364 PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)\r
1365 GuidList = OrderedDict(self.GuidList)\r
1366 for TokenSpace in self.GetGuidsUsedByPcd:\r
1367 # If the token space is not referred to by a patch PCD or an Ex PCD, remove the GUID from the GUID list\r
1368 # The GUIDs in the GUIDs section should really be the GUIDs in the source INF or referred to by Ex and patch PCDs\r
1369 if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:\r
1370 GuidList.pop(TokenSpace)\r
1371 CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)\r
1372 for Package in self.DerivedPackageList:\r
1373 if Package in Packages:\r
1374 continue\r
1375 BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)\r
1376 Found = False\r
1377 for Index in range(len(BeChecked)):\r
1378 for Item in CheckList[Index]:\r
1379 if Item in BeChecked[Index]:\r
1380 Packages.append(Package)\r
1381 Found = True\r
1382 break\r
1383 if Found:\r
1384 break\r
1385\r
1386 VfrPcds = self._GetPcdsMaybeUsedByVfr()\r
1387 for Pkg in self.PlatformInfo.PackageList:\r
1388 if Pkg in Packages:\r
1389 continue\r
1390 for VfrPcd in VfrPcds:\r
1391 if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or\r
1392 (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):\r
1393 Packages.append(Pkg)\r
1394 break\r
1395\r
1396 ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType\r
1397 DriverType = self.PcdIsDriver if self.PcdIsDriver else ''\r
1398 Guid = self.Guid\r
1399 MDefs = self.Module.Defines\r
1400\r
1401 AsBuiltInfDict = {\r
1402 'module_name' : self.Name,\r
1403 'module_guid' : Guid,\r
1404 'module_module_type' : ModuleType,\r
1405 'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],\r
1406 'pcd_is_driver_string' : [],\r
1407 'module_uefi_specification_version' : [],\r
1408 'module_pi_specification_version' : [],\r
1409 'module_entry_point' : self.Module.ModuleEntryPointList,\r
1410 'module_unload_image' : self.Module.ModuleUnloadImageList,\r
1411 'module_constructor' : self.Module.ConstructorList,\r
1412 'module_destructor' : self.Module.DestructorList,\r
1413 'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],\r
1414 'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],\r
1415 'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],\r
1416 'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],\r
1417 'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],\r
1418 'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],\r
1419 'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],\r
1420 'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],\r
1421 'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],\r
1422 'module_arch' : self.Arch,\r
1423 'package_item' : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],\r
1424 'binary_item' : [],\r
1425 'patchablepcd_item' : [],\r
1426 'pcd_item' : [],\r
1427 'protocol_item' : [],\r
1428 'ppi_item' : [],\r
1429 'guid_item' : [],\r
1430 'flags_item' : [],\r
1431 'libraryclasses_item' : []\r
1432 }\r
1433\r
1434 if 'MODULE_UNI_FILE' in MDefs:\r
1435 UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])\r
1436 if os.path.isfile(UNIFile):\r
1437 shutil.copy2(UNIFile, self.OutputDir)\r
1438\r
1439 if self.AutoGenVersion > int(gInfSpecVersion, 0):\r
1440 AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion\r
1441 else:\r
1442 AsBuiltInfDict['module_inf_version'] = gInfSpecVersion\r
1443\r
1444 if DriverType:\r
1445 AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)\r
1446\r
1447 if 'UEFI_SPECIFICATION_VERSION' in self.Specification:\r
1448 AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])\r
1449 if 'PI_SPECIFICATION_VERSION' in self.Specification:\r
1450 AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])\r
1451\r
1452 OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r
1453 DebugDir = self.DebugDir.replace('\\', '/').strip('/')\r
1454 for Item in self.CodaTargetList:\r
1455 File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')\r
1456 if os.path.isabs(File):\r
1457 File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')\r
1458 if Item.Target.Ext.lower() == '.aml':\r
1459 AsBuiltInfDict['binary_item'].append('ASL|' + File)\r
1460 elif Item.Target.Ext.lower() == '.acpi':\r
1461 AsBuiltInfDict['binary_item'].append('ACPI|' + File)\r
1462 elif Item.Target.Ext.lower() == '.efi':\r
1463 AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')\r
1464 else:\r
1465 AsBuiltInfDict['binary_item'].append('BIN|' + File)\r
1466 if not self.DepexGenerated:\r
1467 DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')\r
1468 if os.path.exists(DepexFile):\r
1469 self.DepexGenerated = True\r
1470 if self.DepexGenerated:\r
1471 if self.ModuleType in [SUP_MODULE_PEIM]:\r
1472 AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')\r
1473 elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:\r
1474 AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')\r
1475 elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:\r
1476 AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')\r
1477\r
1478 Bin = self._GenOffsetBin()\r
1479 if Bin:\r
1480 AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)\r
1481\r
1482 for Root, Dirs, Files in os.walk(OutputDir):\r
1483 for File in Files:\r
1484 if File.lower().endswith('.pdb'):\r
1485 AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)\r
1486 HeaderComments = self.Module.HeaderComments\r
1487 StartPos = 0\r
1488 for Index in range(len(HeaderComments)):\r
1489 if HeaderComments[Index].find('@BinaryHeader') != -1:\r
1490 HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')\r
1491 StartPos = Index\r
1492 break\r
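        # The ':#' -> '://' replacement below presumably restores URLs (e.g. 'http://...')\r
        # that were mangled when the header was captured as INF comments.\r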
1493 AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')\r
1494 AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)\r
1495\r
1496 GenList = [\r
1497 (self.ProtocolList, self._ProtocolComments, 'protocol_item'),\r
1498 (self.PpiList, self._PpiComments, 'ppi_item'),\r
1499 (GuidList, self._GuidComments, 'guid_item')\r
1500 ]\r
1501 for Item in GenList:\r
1502 for CName in Item[0]:\r
1503 Comments = '\n '.join(Item[1][CName]) if CName in Item[1] else ''\r
1504 Entry = Comments + '\n ' + CName if Comments else CName\r
1505 AsBuiltInfDict[Item[2]].append(Entry)\r
1506 PatchList = parsePcdInfoFromMapFile(\r
1507 os.path.join(self.OutputDir, self.Name + '.map'),\r
1508 os.path.join(self.OutputDir, self.Name + '.efi')\r
1509 )\r
1510 if PatchList:\r
1511 for Pcd in PatchablePcds:\r
1512 TokenCName = Pcd.TokenCName\r
1513 for PcdItem in GlobalData.MixedPcd:\r
1514 if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:\r
1515 TokenCName = PcdItem[0]\r
1516 break\r
1517 for PatchPcd in PatchList:\r
1518 if TokenCName == PatchPcd[0]:\r
1519 break\r
1520 else:\r
1521 continue\r
1522 PcdValue = ''\r
1523 if Pcd.DatumType == 'BOOLEAN':\r
1524 BoolValue = Pcd.DefaultValue.upper()\r
1525 if BoolValue == 'TRUE':\r
1526 Pcd.DefaultValue = '1'\r
1527 elif BoolValue == 'FALSE':\r
1528 Pcd.DefaultValue = '0'\r
1529\r
1530 if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:\r
1531 HexFormat = '0x%02x'\r
1532 if Pcd.DatumType == TAB_UINT16:\r
1533 HexFormat = '0x%04x'\r
1534 elif Pcd.DatumType == TAB_UINT32:\r
1535 HexFormat = '0x%08x'\r
1536 elif Pcd.DatumType == TAB_UINT64:\r
1537 HexFormat = '0x%016x'\r
1538 PcdValue = HexFormat % int(Pcd.DefaultValue, 0)\r
1539 else:\r
1540 if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':\r
1541 EdkLogger.error("build", AUTOGEN_ERROR,\r
1542 "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)\r
1543 )\r
1544 ArraySize = int(Pcd.MaxDatumSize, 0)\r
1545 PcdValue = Pcd.DefaultValue\r
1546 if PcdValue[0] != '{':\r
1547 Unicode = False\r
1548 if PcdValue[0] == 'L':\r
1549 Unicode = True\r
1550 PcdValue = PcdValue.lstrip('L')\r
1551 PcdValue = eval(PcdValue)\r
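                    # eval() turns the quoted INF string literal (e.g. "abc") into a Python string\r
                    # so that each character can be byte-encoded below.\r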
1552 NewValue = '{'\r
1553 for Index in range(0, len(PcdValue)):\r
1554 if Unicode:\r
1555 CharVal = ord(PcdValue[Index])\r
1556 NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \\r
1557 + '0x%02x' % (CharVal >> 8) + ', '\r
1558 else:\r
1559 NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '\r
1560 Padding = '0x00, '\r
1561 if Unicode:\r
1562 Padding = Padding * 2\r
1563 ArraySize = ArraySize // 2\r
1564 if ArraySize < (len(PcdValue) + 1):\r
1565 if Pcd.MaxSizeUserSet:\r
1566 EdkLogger.error("build", AUTOGEN_ERROR,\r
1567 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)\r
1568 )\r
1569 else:\r
1570 ArraySize = len(PcdValue) + 1\r
1571 if ArraySize > len(PcdValue) + 1:\r
1572 NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)\r
1573 PcdValue = NewValue + Padding.strip().rstrip(',') + '}'\r
1574 elif len(PcdValue.split(',')) <= ArraySize:\r
1575 PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))\r
1576 PcdValue += '}'\r
1577 else:\r
1578 if Pcd.MaxSizeUserSet:\r
1579 EdkLogger.error("build", AUTOGEN_ERROR,\r
1580 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)\r
1581 )\r
1582 else:\r
1583 ArraySize = len(PcdValue) + 1\r
1584 PcdItem = '%s.%s|%s|0x%X' % \\r
1585 (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])\r
1586 PcdComments = ''\r
1587 if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:\r
1588 PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])\r
1589 if PcdComments:\r
1590 PcdItem = PcdComments + '\n ' + PcdItem\r
1591 AsBuiltInfDict['patchablepcd_item'].append(PcdItem)\r
1592\r
1593 for Pcd in Pcds + VfrPcds:\r
1594 PcdCommentList = []\r
1595 HiiInfo = ''\r
1596 TokenCName = Pcd.TokenCName\r
1597 for PcdItem in GlobalData.MixedPcd:\r
1598 if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:\r
1599 TokenCName = PcdItem[0]\r
1600 break\r
1601 if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:\r
1602 for SkuName in Pcd.SkuInfoList:\r
1603 SkuInfo = Pcd.SkuInfoList[SkuName]\r
1604 HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)\r
1605 break\r
1606 if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:\r
1607 PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]\r
1608 if HiiInfo:\r
1609 UsageIndex = -1\r
1610 UsageStr = ''\r
1611 for Index, Comment in enumerate(PcdCommentList):\r
1612 for Usage in UsageList:\r
1613 if Comment.find(Usage) != -1:\r
1614 UsageStr = Usage\r
1615 UsageIndex = Index\r
1616 break\r
1617 if UsageIndex != -1:\r
1618 PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))\r
1619 else:\r
1620 PcdCommentList.append('## UNDEFINED ' + HiiInfo)\r
1621 PcdComments = '\n '.join(PcdCommentList)\r
1622 PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName\r
1623 if PcdComments:\r
1624 PcdEntry = PcdComments + '\n ' + PcdEntry\r
1625 AsBuiltInfDict['pcd_item'].append(PcdEntry)\r
1626 for Item in self.BuildOption:\r
1627 if 'FLAGS' in self.BuildOption[Item]:\r
1628 AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))\r
1629\r
1630         # Generate the LibraryClasses section in comments.\r
1631 for Library in self.LibraryAutoGenList:\r
1632 AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))\r
1633\r
1634         # Generate the UserExtensions TianoCore section.\r
1635         # All TianoCore user extensions are copied.\r
1636 UserExtStr = ''\r
1637 for TianoCore in self._GetTianoCoreUserExtensionList():\r
1638 UserExtStr += '\n'.join(TianoCore)\r
1639 ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])\r
1640 if os.path.isfile(ExtensionFile):\r
1641 shutil.copy2(ExtensionFile, self.OutputDir)\r
1642 AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr\r
1643\r
1644         # Generate the depex expression section in comments.\r
1645 DepexExpression = self._GetDepexExpresionString()\r
1646 AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''\r
1647\r
1648 AsBuiltInf = TemplateString()\r
1649 AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))\r
1650\r
1651 SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)\r
1652\r
1653 self.IsAsBuiltInfCreated = True\r
1654\r
3bfbc915 1655 def CacheCopyFile(self, DestDir, SourceDir, File):\r
fc8b8dea
SS
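        # Copy a single cached file from SourceDir into DestDir, preserving its path\r
        # relative to SourceDir; plain directories are ignored.\r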
1656 if os.path.isdir(File):\r
1657 return\r
1658\r
3bfbc915
SS
1659 sub_dir = os.path.relpath(File, SourceDir)\r
1660 destination_file = os.path.join(DestDir, sub_dir)\r
0e7e7a26
SS
1661 destination_dir = os.path.dirname(destination_file)\r
1662 CreateDirectory(destination_dir)\r
1663 try:\r
1664 CopyFileOnChange(File, destination_dir)\r
1665 except:\r
1666 EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))\r
1667 return\r
1668\r
e8449e1d 1669 def CopyModuleToCache(self):\r
fc8b8dea
SS
1670 # Find the MakeHashStr and PreMakeHashStr from latest MakeHashFileList\r
1671 # and PreMakeHashFileList files\r
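        # The file names follow the pattern '<Name>.MakeHashFileList.<32-hex-md5>' and\r
        # '<Name>.PreMakeHashFileList.<32-hex-md5>'; the trailing digest is extracted below.\r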
1672 MakeHashStr = None\r
1673 PreMakeHashStr = None\r
1674 MakeTimeStamp = 0\r
1675 PreMakeTimeStamp = 0\r
1676 Files = [f for f in os.listdir(LongFilePath(self.BuildDir)) if path.isfile(LongFilePath(path.join(self.BuildDir, f)))]\r
1677 for File in Files:\r
1678 if ".MakeHashFileList." in File:\r
1679                 # find the latest file by time stamp\r
1680 FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]\r
1681 if FileTimeStamp > MakeTimeStamp:\r
1682 MakeTimeStamp = FileTimeStamp\r
1683 MakeHashStr = File.split('.')[-1]\r
1684 if len(MakeHashStr) != 32:\r
1685 EdkLogger.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File))\r
1686 if ".PreMakeHashFileList." in File:\r
1687 FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]\r
1688 if FileTimeStamp > PreMakeTimeStamp:\r
1689 PreMakeTimeStamp = FileTimeStamp\r
1690 PreMakeHashStr = File.split('.')[-1]\r
1691 if len(PreMakeHashStr) != 32:\r
1692 EdkLogger.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File))\r
0e7e7a26 1693\r
fc8b8dea
SS
1694 if not MakeHashStr:\r
1695 EdkLogger.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))\r
1696 return\r
1697 if not PreMakeHashStr:\r
1698 EdkLogger.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))\r
1699 return\r
0e7e7a26 1700\r
fc8b8dea
SS
1701 # Create Cache destination dirs\r
1702 FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
1703 FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
1704 CacheFileDir = path.join(FileDir, MakeHashStr)\r
1705 CacheFfsDir = path.join(FfsDir, MakeHashStr)\r
1706 CreateDirectory (CacheFileDir)\r
1707 CreateDirectory (CacheFfsDir)\r
0e7e7a26 1708\r
fc8b8dea
SS
1709 # Create ModuleHashPair file to support multiple version cache together\r
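        # The ModuleHashPair file is a JSON list of (PreMakeHash, MakeHash) pairs,\r
        # e.g. [["<PreMakeHash32>", "<MakeHash32>"], ...], newest pair first.\r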
1710 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
1711 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
1712 if os.path.exists(ModuleHashPair):\r
1713 with open(ModuleHashPair, 'r') as f:\r
1714 ModuleHashPairList = json.load(f)\r
1715 if not (PreMakeHashStr, MakeHashStr) in set(map(tuple, ModuleHashPairList)):\r
1716 ModuleHashPairList.insert(0, (PreMakeHashStr, MakeHashStr))\r
1717 with open(ModuleHashPair, 'w') as f:\r
1718 json.dump(ModuleHashPairList, f, indent=2)\r
1719\r
1720 # Copy files to Cache destination dirs\r
e8449e1d
FB
1721 if not self.OutputFile:\r
1722 Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
1723 self.OutputFile = Ma.Binaries\r
1724 for File in self.OutputFile:\r
fc8b8dea
SS
1725 if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):\r
1726 self.CacheCopyFile(CacheFfsDir, self.FfsOutputDir, File)\r
1727 else:\r
1728 if self.Name + ".autogen.hash." in File or \\r
1729 self.Name + ".autogen.hashchain." in File or \\r
1730 self.Name + ".hash." in File or \\r
1731 self.Name + ".hashchain." in File or \\r
1732 self.Name + ".PreMakeHashFileList." in File or \\r
1733 self.Name + ".MakeHashFileList." in File:\r
1734 self.CacheCopyFile(FileDir, self.BuildDir, File)\r
d01a9986 1735 else:\r
fc8b8dea 1736 self.CacheCopyFile(CacheFileDir, self.BuildDir, File)\r
e8449e1d
FB
1737 ## Create makefile for the module and its dependent libraries\r
1738 #\r
1739     #   @param      CreateLibraryMakeFile   Flag indicating whether or not the makefiles of\r
1740 # dependent libraries will be created\r
1741 #\r
1742 @cached_class_function\r
1743 def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r
0e7e7a26 1744\r
e8449e1d
FB
1745         # Nest this function inside its only caller.\r
1746 def CreateTimeStamp():\r
1747 FileSet = {self.MetaFile.Path}\r
1748\r
1749 for SourceFile in self.Module.Sources:\r
1750 FileSet.add (SourceFile.Path)\r
1751\r
1752 for Lib in self.DependentLibraryList:\r
1753 FileSet.add (Lib.MetaFile.Path)\r
1754\r
1755 for f in self.AutoGenDepSet:\r
1756 FileSet.add (f.Path)\r
1757\r
1758 if os.path.exists (self.TimeStampPath):\r
1759 os.remove (self.TimeStampPath)\r
df43ea6c
FB
1760\r
1761 SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)\r
e8449e1d
FB
1762\r
1763 # Ignore generating makefile when it is a binary module\r
1764 if self.IsBinaryModule:\r
1765 return\r
1766\r
1767 self.GenFfsList = GenFfsList\r
1768\r
1769 if not self.IsLibrary and CreateLibraryMakeFile:\r
1770 for LibraryAutoGen in self.LibraryAutoGenList:\r
1771 LibraryAutoGen.CreateMakeFile()\r
673d09a2 1772\r
0e7e7a26
SS
1773 # CanSkip uses timestamps to determine build skipping\r
1774 if self.CanSkip():\r
e8449e1d
FB
1775 return\r
1776\r
1777 if len(self.CustomMakefile) == 0:\r
1778 Makefile = GenMake.ModuleMakefile(self)\r
1779 else:\r
1780 Makefile = GenMake.CustomMakefile(self)\r
1781 if Makefile.Generate():\r
1782 EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %\r
1783 (self.Name, self.Arch))\r
1784 else:\r
1785 EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %\r
1786 (self.Name, self.Arch))\r
1787\r
1788 CreateTimeStamp()\r
1789\r
0e7e7a26
SS
1790 MakefileType = Makefile._FileType\r
1791 MakefileName = Makefile._FILE_NAME_[MakefileType]\r
1792 MakefilePath = os.path.join(self.MakeFileDir, MakefileName)\r
fc8b8dea
SS
1793 FilePath = path.join(self.BuildDir, self.Name + ".makefile")\r
1794 SaveFileOnChange(FilePath, MakefilePath, False)\r
0e7e7a26 1795\r
e8449e1d
FB
1796 def CopyBinaryFiles(self):\r
1797 for File in self.Module.Binaries:\r
1798 SrcPath = File.Path\r
1799 DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))\r
1800 CopyLongFilePath(SrcPath, DstPath)\r
1801 ## Create autogen code for the module and its dependent libraries\r
1802 #\r
1803     #   @param      CreateLibraryCodeFile   Flag indicating whether or not the code of\r
1804 # dependent libraries will be created\r
1805 #\r
1806 def CreateCodeFile(self, CreateLibraryCodeFile=True):\r
0e7e7a26 1807\r
e8449e1d
FB
1808 if self.IsCodeFileCreated:\r
1809 return\r
1810\r
1811         # Need to generate the PcdDatabase even when the PCD driver is a binary module\r
1812 if self.IsBinaryModule and self.PcdIsDriver != '':\r
1813 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
1814 return\r
1815 if self.IsBinaryModule:\r
1816 if self.IsLibrary:\r
1817 self.CopyBinaryFiles()\r
1818 return\r
1819\r
1820 if not self.IsLibrary and CreateLibraryCodeFile:\r
1821 for LibraryAutoGen in self.LibraryAutoGenList:\r
1822 LibraryAutoGen.CreateCodeFile()\r
0e7e7a26 1823\r
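        # Touch the cached LibraryAutoGenList property here, presumably to make sure the\r
        # library AutoGen objects are created (and their build rules applied) before this\r
        # module's AutoGen files are generated below.\r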
1f5e4d91 1824 self.LibraryAutoGenList\r
e8449e1d
FB
1825 AutoGenList = []\r
1826 IgoredAutoGenList = []\r
1827\r
1828 for File in self.AutoGenFileList:\r
1829 if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):\r
1830 AutoGenList.append(str(File))\r
1831 else:\r
1832 IgoredAutoGenList.append(str(File))\r
1833\r
1834\r
1835 for ModuleType in self.DepexList:\r
1836 # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module\r
1837 if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:\r
1838 continue\r
1839\r
1840 Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)\r
1841 DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}\r
1842\r
1843 if len(Dpx.PostfixNotation) != 0:\r
1844 self.DepexGenerated = True\r
1845\r
1846 if Dpx.Generate(path.join(self.OutputDir, DpxFile)):\r
1847 AutoGenList.append(str(DpxFile))\r
1848 else:\r
1849 IgoredAutoGenList.append(str(DpxFile))\r
1850\r
1851 if IgoredAutoGenList == []:\r
1852 EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %\r
1853 (" ".join(AutoGenList), self.Name, self.Arch))\r
1854 elif AutoGenList == []:\r
1855 EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %\r
1856 (" ".join(IgoredAutoGenList), self.Name, self.Arch))\r
1857 else:\r
1858 EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %\r
1859 (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))\r
1860\r
1861 self.IsCodeFileCreated = True\r
0e7e7a26 1862\r
e8449e1d
FB
1863 return AutoGenList\r
1864\r
1865 ## Summarize the ModuleAutoGen objects of all libraries used by this module\r
1866 @cached_property\r
1867 def LibraryAutoGenList(self):\r
1868 RetVal = []\r
1869 for Library in self.DependentLibraryList:\r
1870 La = ModuleAutoGen(\r
1871 self.Workspace,\r
1872 Library.MetaFile,\r
1873 self.BuildTarget,\r
1874 self.ToolChain,\r
1875 self.Arch,\r
1876 self.PlatformInfo.MetaFile,\r
1877 self.DataPipe\r
1878 )\r
1879 La.IsLibrary = True\r
1880 if La not in RetVal:\r
1881 RetVal.append(La)\r
1882 for Lib in La.CodaTargetList:\r
1883 self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)\r
1884 return RetVal\r
1885\r
fc8b8dea
SS
1886 def GenCMakeHash(self):\r
1887         # GenCMakeHash can only be called with --binary-destination.\r
1888         # It is never called in multiprocessing and always saves its result directly in the main process,\r
1889         # so there is no need for a remote dict to share the gCMakeHashFile result with the main process.\r
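        # The result is written to '<Name>.autogen.hashchain.<md5-hexdigest>' in BuildDir\r
        # and recorded in GlobalData.gCMakeHashFile.\r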
e8449e1d 1890\r
fc8b8dea
SS
1891 DependencyFileSet = set()\r
1892 # Add AutoGen files\r
1893 if self.AutoGenFileList:\r
1894 for File in set(self.AutoGenFileList):\r
1895 DependencyFileSet.add(File)\r
1896\r
1897 # Add Makefile\r
1898 abspath = path.join(self.BuildDir, self.Name + ".makefile")\r
        lines = []  # ensure 'lines' exists even if the makefile wrapper cannot be read\r
1899         try:\r
1900 with open(LongFilePath(abspath),"r") as fd:\r
1901 lines = fd.readlines()\r
1902 except Exception as e:\r
1903 EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)\r
1904 if lines:\r
1905 DependencyFileSet.update(lines)\r
e8449e1d 1906\r
fc8b8dea 1907         # Calculate the hash of all the dependency files above\r
e8449e1d 1908         # Initialize the hash object\r
fc8b8dea 1909 FileList = []\r
e8449e1d 1910 m = hashlib.md5()\r
fc8b8dea
SS
1911 for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
1912 if not path.exists(LongFilePath(str(File))):\r
1913 EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
1914 continue\r
1915 with open(LongFilePath(str(File)), 'rb') as f:\r
e8449e1d 1916 Content = f.read()\r
fc8b8dea
SS
1917 m.update(Content)\r
1918 FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
e8449e1d 1919\r
fc8b8dea
SS
1920 HashChainFile = path.join(self.BuildDir, self.Name + ".autogen.hashchain." + m.hexdigest())\r
1921 GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
1922 try:\r
1923 with open(LongFilePath(HashChainFile), 'w') as f:\r
1924 json.dump(FileList, f, indent=2)\r
1925 except:\r
1926 EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)\r
1927 return False\r
0e7e7a26 1928\r
fc8b8dea
SS
1929 def GenModuleHash(self):\r
1930         # GenModuleHash is only called after the autogen phase.\r
1931         # It is never called in multiprocessing and always saves its result directly in the main process,\r
1932         # so there is no need for a remote dict to share the gModuleHashFile result with the main process.\r
1933         #\r
1934         # GenModuleHash consumes no dict.\r
1935         # GenModuleHash produces the local gModuleHashFile dict.\r
94459080 1936\r
0e7e7a26
SS
1937 DependencyFileSet = set()\r
1938 # Add Module Meta file\r
fc8b8dea 1939 DependencyFileSet.add(self.MetaFile.Path)\r
0e7e7a26
SS
1940\r
1941 # Add Module's source files\r
1942 if self.SourceFileList:\r
1943 for File in set(self.SourceFileList):\r
fc8b8dea 1944 DependencyFileSet.add(File.Path)\r
0e7e7a26
SS
1945\r
1946         # Add the module's include header files\r
fc8b8dea
SS
1947 # Directly use the deps.txt file in the module BuildDir\r
1948 abspath = path.join(self.BuildDir, "deps.txt")\r
1949 rt = None\r
1950 try:\r
1951 with open(LongFilePath(abspath),"r") as fd:\r
1952 lines = fd.readlines()\r
1953 if lines:\r
1954 rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])\r
1955 except Exception as e:\r
1956 EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)\r
1957\r
1958 if rt:\r
1959 DependencyFileSet.update(rt)\r
0e7e7a26 1960\r
0e7e7a26
SS
1961\r
1962         # Calculate the hash of all the dependency files above\r
1963         # Initialize the hash object\r
1964 FileList = []\r
1965 m = hashlib.md5()\r
fc8b8dea 1966 BuildDirStr = path.abspath(self.BuildDir).lower()\r
0e7e7a26 1967 for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
fc8b8dea
SS
1968             # Skip the AutoGen files in BuildDir, which have already been\r
1969             # included in the .autogen.hash. file\r
1970 if BuildDirStr in path.abspath(File).lower():\r
1971 continue\r
1972 if not path.exists(LongFilePath(File)):\r
0e7e7a26
SS
1973 EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
1974 continue\r
fc8b8dea 1975 with open(LongFilePath(File), 'rb') as f:\r
94459080 1976 Content = f.read()\r
0e7e7a26 1977 m.update(Content)\r
fc8b8dea 1978 FileList.append((File, hashlib.md5(Content).hexdigest()))\r
0e7e7a26 1979\r
fc8b8dea
SS
1980 HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())\r
1981 GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
1982 try:\r
1983 with open(LongFilePath(HashChainFile), 'w') as f:\r
1984 json.dump(FileList, f, indent=2)\r
1985 except:\r
1986 EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)\r
1987 return False\r
0e7e7a26 1988\r
fc8b8dea
SS
1989 def GenPreMakefileHashList(self):\r
1990         # GenPreMakefileHashList consumes the dicts below:\r
1991         #   gPlatformHashFile\r
1992         #   gPackageHashFile\r
1993         #   gModuleHashFile\r
1994         # GenPreMakefileHashList produces no dict.\r
1995         # gModuleHashFile items might be produced in multiprocessing, so\r
1996         # the gModuleHashFile remote dict needs to be checked.\r
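        # The collected hash file paths are saved below as\r
        # '<Name>.PreMakeHashFileList.<md5-of-those-paths>' in BuildDir.\r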
94459080 1997\r
0e7e7a26
SS
1998 # skip binary module\r
1999 if self.IsBinaryModule:\r
2000 return\r
2001\r
fc8b8dea 2002 FileList = []\r
0e7e7a26 2003 m = hashlib.md5()\r
0e7e7a26 2004 # Add Platform level hash\r
fc8b8dea
SS
2005 HashFile = GlobalData.gPlatformHashFile\r
2006 if path.exists(LongFilePath(HashFile)):\r
2007 FileList.append(HashFile)\r
2008 m.update(HashFile.encode('utf-8'))\r
0e7e7a26 2009 else:\r
fc8b8dea 2010 EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)\r
0e7e7a26
SS
2011\r
2012 # Add Package level hash\r
2013 if self.DependentPackageList:\r
2014 for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
fc8b8dea
SS
2015 if not (Pkg.PackageName, Pkg.Arch) in GlobalData.gPackageHashFile:\r
2016 EdkLogger.quiet("[cache warning]:No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))\r
2017 continue\r
2018 HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]\r
2019 if path.exists(LongFilePath(HashFile)):\r
2020 FileList.append(HashFile)\r
2021 m.update(HashFile.encode('utf-8'))\r
0e7e7a26 2022 else:\r
fc8b8dea 2023 EdkLogger.quiet("[cache warning]:No Package HashFile: %s" % HashFile)\r
0e7e7a26
SS
2024\r
2025 # Add Module self\r
fc8b8dea
SS
2026         # GenPreMakefileHashList is needed for both --binary-destination\r
2027         # and --hash. With --hash, the ModuleHashFile might be saved in the remote dict\r
2028         # during multiprocessing.\r
2029 if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:\r
2030 HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]\r
0e7e7a26 2031 else:\r
fc8b8dea
SS
2032 EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
2033 if path.exists(LongFilePath(HashFile)):\r
2034 FileList.append(HashFile)\r
2035 m.update(HashFile.encode('utf-8'))\r
0e7e7a26 2036 else:\r
fc8b8dea 2037 EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)\r
0e7e7a26 2038\r
fc8b8dea
SS
2039 # Add Library hash\r
2040 if self.LibraryAutoGenList:\r
2041 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):\r
0e7e7a26 2042\r
fc8b8dea
SS
2043 if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:\r
2044 HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]\r
2045 else:\r
2046 EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))\r
2047 if path.exists(LongFilePath(HashFile)):\r
2048 FileList.append(HashFile)\r
2049 m.update(HashFile.encode('utf-8'))\r
2050 else:\r
2051 EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)\r
0e7e7a26 2052\r
fc8b8dea
SS
2053 # Save PreMakeHashFileList\r
2054 FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())\r
2055 try:\r
2056 with open(LongFilePath(FilePath), 'w') as f:\r
2057 json.dump(FileList, f, indent=0)\r
2058 except:\r
2059 EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)\r
0e7e7a26 2060\r
fc8b8dea
SS
2061 def GenMakefileHashList(self):\r
2062         # GenMakefileHashList is only needed with --binary-destination, which keeps\r
2063         # everything in the local dict, so there is no need to check the remote dict.\r
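        # The collected hash file paths are saved below as\r
        # '<Name>.MakeHashFileList.<md5-of-those-paths>' in BuildDir.\r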
94459080 2064\r
0e7e7a26
SS
2065 # skip binary module\r
2066 if self.IsBinaryModule:\r
2067 return\r
2068\r
fc8b8dea 2069 FileList = []\r
0e7e7a26 2070 m = hashlib.md5()\r
fc8b8dea
SS
2071 # Add AutoGen hash\r
2072 HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]\r
2073 if path.exists(LongFilePath(HashFile)):\r
2074 FileList.append(HashFile)\r
2075 m.update(HashFile.encode('utf-8'))\r
2076 else:\r
2077 EdkLogger.quiet("[cache warning]:No AutoGen HashFile: %s" % HashFile)\r
0e7e7a26 2078\r
fc8b8dea
SS
2079 # Add Module self\r
2080 if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:\r
2081 HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]\r
2082 else:\r
2083 EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
2084 if path.exists(LongFilePath(HashFile)):\r
2085 FileList.append(HashFile)\r
2086 m.update(HashFile.encode('utf-8'))\r
2087 else:\r
2088 EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)\r
0e7e7a26
SS
2089\r
2090 # Add Library hash\r
2091 if self.LibraryAutoGenList:\r
fc8b8dea
SS
2092 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):\r
2093 if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:\r
2094 HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]\r
2095 else:\r
2096 EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))\r
2097 if path.exists(LongFilePath(HashFile)):\r
2098 FileList.append(HashFile)\r
2099 m.update(HashFile.encode('utf-8'))\r
2100 else:\r
2101 EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)\r
0e7e7a26 2102\r
fc8b8dea
SS
2103 # Save MakeHashFileList\r
2104 FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())\r
2105 try:\r
2106 with open(LongFilePath(FilePath), 'w') as f:\r
2107 json.dump(FileList, f, indent=0)\r
2108 except:\r
2109 EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)\r
2110\r
2111 def CheckHashChainFile(self, HashChainFile):\r
2112         # Assume the HashChainFile basename format is 'x.hashchain.16BytesHexStr',\r
2113         # where x is the module name and 16BytesHexStr is the md5 hexdigest of\r
2114         # the content of all files recorded in the hash chain\r
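        # e.g. 'MyModule.hashchain.0123456789abcdef0123456789abcdef' (illustrative name); the file\r
        # body is a JSON list of [SourceFilePath, md5-hexdigest] pairs written by GenModuleHash/GenCMakeHash.\r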
2115 HashStr = HashChainFile.split('.')[-1]\r
2116 if len(HashStr) != 32:\r
2117             EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % (HashChainFile))\r
2118 return False\r
0e7e7a26 2119\r
fc8b8dea
SS
2120 try:\r
2121 with open(LongFilePath(HashChainFile), 'r') as f:\r
2122 HashChainList = json.load(f)\r
2123 except:\r
2124 EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)\r
2125 return False\r
0e7e7a26 2126\r
fc8b8dea
SS
2127         # Report the first file that is missing or whose hash differs (cache miss)\r
2128 # print(HashChainFile)\r
2129 for idx, (SrcFile, SrcHash) in enumerate (HashChainList):\r
2130 if SrcFile in GlobalData.gFileHashDict:\r
2131 DestHash = GlobalData.gFileHashDict[SrcFile]\r
2132 else:\r
2133 try:\r
2134 with open(LongFilePath(SrcFile), 'rb') as f:\r
2135 Content = f.read()\r
2136 DestHash = hashlib.md5(Content).hexdigest()\r
2137 GlobalData.gFileHashDict[SrcFile] = DestHash\r
2138 except IOError as X:\r
2139 # cache miss if SrcFile is removed in new version code\r
2140 GlobalData.gFileHashDict[SrcFile] = 0\r
2141 EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))\r
2142 return False\r
2143 if SrcHash != DestHash:\r
2144 EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))\r
2145 return False\r
2146\r
2147 return True\r
0e7e7a26
SS
2148\r
2149     ## Decide whether we can skip the remaining autogen and make process\r
fc8b8dea
SS
2150 def CanSkipbyMakeCache(self):\r
2151 # For --binary-source only\r
2152         # CanSkipbyMakeCache consumes the dicts below:\r
2153         #   gModuleMakeCacheStatus\r
2154         #   gHashChainStatus\r
2155         # CanSkipbyMakeCache produces the gModuleMakeCacheStatus and gModuleHashFile dicts.\r
2156         # All these dicts might be produced in multiprocessing, so\r
2157         # these remote dicts need to be checked.\r
2158\r
0e7e7a26
SS
2159 if not GlobalData.gBinCacheSource:\r
2160 return False\r
2161\r
fc8b8dea
SS
2162 if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:\r
2163 return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]\r
94459080 2164\r
fc8b8dea 2165         # If the module is binary, which has a special build rule, do not skip it via the cache.\r
0e7e7a26 2166 if self.IsBinaryModule:\r
fc8b8dea
SS
2167 print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)\r
2168 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
0e7e7a26
SS
2169 return False\r
2170\r
fc8b8dea 2171         # Treat .inc as a binary file; do not skip by hash\r
0e7e7a26
SS
2172 for f_ext in self.SourceFileList:\r
2173 if '.inc' in str(f_ext):\r
fc8b8dea
SS
2174 print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)\r
2175 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
0e7e7a26
SS
2176 return False\r
2177\r
fc8b8dea 2178 ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
0e7e7a26
SS
2179 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
2180\r
2181 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
fc8b8dea 2182 ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")\r
0e7e7a26 2183 try:\r
fc8b8dea 2184 with open(LongFilePath(ModuleHashPair), 'r') as f:\r
94459080 2185 ModuleHashPairList = json.load(f)\r
0e7e7a26 2186 except:\r
fc8b8dea
SS
2187             # ModuleHashPair might not exist for a newly added module\r
2188 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
0e7e7a26 2189 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
fc8b8dea 2190 print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)\r
0e7e7a26
SS
2191 return False\r
2192\r
fc8b8dea 2193 # Check the PreMakeHash in ModuleHashPairList one by one\r
0e7e7a26 2194 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
fc8b8dea
SS
2195 SourceHashDir = path.join(ModuleCacheDir, MakeHash)\r
2196 SourceFfsHashDir = path.join(FfsDir, MakeHash)\r
2197 PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)\r
2198 MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)\r
0e7e7a26 2199\r
fc8b8dea
SS
2200 try:\r
2201 with open(LongFilePath(MakeHashFileList_FilePah), 'r') as f:\r
2202 MakeHashFileList = json.load(f)\r
2203 except:\r
2204 EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePah)\r
2205 continue\r
0e7e7a26 2206\r
fc8b8dea
SS
2207 HashMiss = False\r
2208 for HashChainFile in MakeHashFileList:\r
2209 HashChainStatus = None\r
2210 if HashChainFile in GlobalData.gHashChainStatus:\r
2211 HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
2212 if HashChainStatus == False:\r
2213 HashMiss = True\r
2214 break\r
2215 elif HashChainStatus == True:\r
2216 continue\r
2217 # Convert to path start with cache source dir\r
2218 RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)\r
2219 NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)\r
2220 if self.CheckHashChainFile(NewFilePath):\r
2221 GlobalData.gHashChainStatus[HashChainFile] = True\r
2222 # Save the module self HashFile for GenPreMakefileHashList later usage\r
2223 if self.Name + ".hashchain." in HashChainFile:\r
2224 GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
2225 else:\r
2226 GlobalData.gHashChainStatus[HashChainFile] = False\r
2227 HashMiss = True\r
2228 break\r
0e7e7a26 2229\r
fc8b8dea
SS
2230 if HashMiss:\r
2231 continue\r
0e7e7a26 2232\r
fc8b8dea
SS
2233 # PreMakefile cache hit, restore the module build result\r
2234 for root, dir, files in os.walk(SourceHashDir):\r
0e7e7a26
SS
2235 for f in files:\r
2236 File = path.join(root, f)\r
fc8b8dea
SS
2237 self.CacheCopyFile(self.BuildDir, SourceHashDir, File)\r
2238 if os.path.exists(SourceFfsHashDir):\r
2239 for root, dir, files in os.walk(SourceFfsHashDir):\r
2240 for f in files:\r
2241 File = path.join(root, f)\r
2242 self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)\r
2243\r
2244 if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
2245 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
2246\r
2247 print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)\r
2248 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
2249 return True\r
0e7e7a26 2250\r
fc8b8dea
SS
2251 print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)\r
2252 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
2253 return False\r
0e7e7a26 2254\r
fc8b8dea
SS
2255     ## Decide whether we can skip the remaining autogen and make process\r
2256 def CanSkipbyPreMakeCache(self):\r
2257         # CanSkipbyPreMakeCache consumes the dicts below:\r
2258         #   gModulePreMakeCacheStatus\r
2259         #   gHashChainStatus\r
2260         #   gModuleHashFile\r
2261         # CanSkipbyPreMakeCache produces the gModulePreMakeCacheStatus dict.\r
2262         # All these dicts might be produced in multiprocessing, so\r
2263         # these remote dicts need to be checked.\r
2264\r
2265 if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:\r
0e7e7a26
SS
2266 return False\r
2267\r
fc8b8dea
SS
2268 if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:\r
2269 return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]\r
94459080 2270\r
fc8b8dea 2271         # If the module is binary, which has a special build rule, do not skip it via the cache.\r
0e7e7a26 2272 if self.IsBinaryModule:\r
fc8b8dea
SS
2273 print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)\r
2274 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
0e7e7a26
SS
2275 return False\r
2276\r
fc8b8dea 2277         # Treat .inc as a binary file; do not skip by hash\r
0e7e7a26
SS
2278 for f_ext in self.SourceFileList:\r
2279 if '.inc' in str(f_ext):\r
fc8b8dea
SS
2280 print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)\r
2281 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
0e7e7a26
SS
2282 return False\r
2283\r
fc8b8dea 2284 # For --hash only in the incremental build\r
56c786b0 2285 if not GlobalData.gBinCacheSource:\r
fc8b8dea
SS
2286 Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]\r
2287 PreMakeHashFileList_FilePah = None\r
2288 MakeTimeStamp = 0\r
2289 # Find latest PreMakeHashFileList file in self.BuildDir folder\r
2290 for File in Files:\r
2291 if ".PreMakeHashFileList." in File:\r
2292 FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]\r
2293 if FileTimeStamp > MakeTimeStamp:\r
2294 MakeTimeStamp = FileTimeStamp\r
2295 PreMakeHashFileList_FilePah = File\r
2296 if not PreMakeHashFileList_FilePah:\r
2297 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
2298 return False\r
56c786b0 2299\r
fc8b8dea
SS
2300 try:\r
2301 with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:\r
2302 PreMakeHashFileList = json.load(f)\r
2303 except:\r
2304 EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)\r
2305 print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
2306 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
2307 return False\r
56c786b0 2308\r
fc8b8dea
SS
2309 HashMiss = False\r
2310 for HashChainFile in PreMakeHashFileList:\r
2311 HashChainStatus = None\r
2312 if HashChainFile in GlobalData.gHashChainStatus:\r
2313 HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
2314 if HashChainStatus == False:\r
2315 HashMiss = True\r
2316 break\r
2317 elif HashChainStatus == True:\r
2318 continue\r
2319 if self.CheckHashChainFile(HashChainFile):\r
2320 GlobalData.gHashChainStatus[HashChainFile] = True\r
2321 # Save the module self HashFile for GenPreMakefileHashList later usage\r
2322 if self.Name + ".hashchain." in HashChainFile:\r
2323 GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
2324 else:\r
2325 GlobalData.gHashChainStatus[HashChainFile] = False\r
2326 HashMiss = True\r
2327 break\r
56c786b0 2328\r
fc8b8dea
SS
2329 if HashMiss:\r
2330 print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
2331 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
2332 return False\r
2333 else:\r
2334 print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
2335 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
2336 return True\r
56c786b0 2337\r
fc8b8dea
SS
2338 ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
2339 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
56c786b0
SS
2340\r
2341 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
fc8b8dea 2342 ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")\r
56c786b0 2343 try:\r
fc8b8dea 2344 with open(LongFilePath(ModuleHashPair), 'r') as f:\r
94459080 2345 ModuleHashPairList = json.load(f)\r
56c786b0 2346 except:\r
fc8b8dea
SS
2347             # ModuleHashPair might not exist for a newly added module\r
2348 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
2349 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
2350 print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
2351 return False\r
56c786b0 2352\r
fc8b8dea 2353 # Check the PreMakeHash in ModuleHashPairList one by one\r
56c786b0 2354 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
fc8b8dea
SS
2355 SourceHashDir = path.join(ModuleCacheDir, MakeHash)\r
2356 SourceFfsHashDir = path.join(FfsDir, MakeHash)\r
2357 PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)\r
2358 MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)\r
56c786b0 2359\r
56c786b0 2360 try:\r
fc8b8dea
SS
2361 with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:\r
2362 PreMakeHashFileList = json.load(f)\r
56c786b0 2363 except:\r
fc8b8dea
SS
2364 EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)\r
2365 continue\r
56c786b0 2366\r
fc8b8dea
SS
2367 HashMiss = False\r
2368 for HashChainFile in PreMakeHashFileList:\r
2369 HashChainStatus = None\r
2370 if HashChainFile in GlobalData.gHashChainStatus:\r
2371 HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
2372 if HashChainStatus == False:\r
2373 HashMiss = True\r
2374 break\r
2375 elif HashChainStatus == True:\r
2376 continue\r
2377 # Convert to path start with cache source dir\r
2378 RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)\r
2379 NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)\r
2380 if self.CheckHashChainFile(NewFilePath):\r
2381 GlobalData.gHashChainStatus[HashChainFile] = True\r
2382 else:\r
2383 GlobalData.gHashChainStatus[HashChainFile] = False\r
2384 HashMiss = True\r
2385 break\r
e8449e1d 2386\r
fc8b8dea
SS
2387 if HashMiss:\r
2388 continue\r
e8449e1d 2389\r
fc8b8dea
SS
2390 # PreMakefile cache hit, restore the module build result\r
2391 for root, dir, files in os.walk(SourceHashDir):\r
2392 for f in files:\r
2393 File = path.join(root, f)\r
2394 self.CacheCopyFile(self.BuildDir, SourceHashDir, File)\r
2395 if os.path.exists(SourceFfsHashDir):\r
2396 for root, dir, files in os.walk(SourceFfsHashDir):\r
2397 for f in files:\r
2398 File = path.join(root, f)\r
2399 self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)\r
2400\r
2401 if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
2402 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
2403\r
2404 print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
2405 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
2406 return True\r
e8449e1d 2407\r
fc8b8dea
SS
2408 print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
2409 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
2410 return False\r
e8449e1d 2411\r
fc8b8dea
SS
2412 ## Decide whether we can skip the Module build\r
2413 def CanSkipbyCache(self, gHitSet):\r
2414 # Hashing feature is off\r
2415 if not GlobalData.gBinCacheSource:\r
0e7e7a26
SS
2416 return False\r
2417\r
fc8b8dea 2418 if self in gHitSet:\r
0e7e7a26 2419 return True\r
e8449e1d 2420\r
0e7e7a26 2421 return False\r
e8449e1d
FB
2422\r
2423 ## Decide whether we can skip the ModuleAutoGen process\r
2424     #   If any source file is newer than the module, then we cannot skip it\r
2425 #\r
2426 def CanSkip(self):\r
0e7e7a26
SS
2427 # Don't skip if cache feature enabled\r
2428 if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:\r
2429 return False\r
e8449e1d
FB
2430 if self.MakeFileDir in GlobalData.gSikpAutoGenCache:\r
2431 return True\r
2432 if not os.path.exists(self.TimeStampPath):\r
2433 return False\r
2434         # last creation time of the module\r
2435 DstTimeStamp = os.stat(self.TimeStampPath)[8]\r
2436\r
2437 SrcTimeStamp = self.Workspace._SrcTimeStamp\r
2438 if SrcTimeStamp > DstTimeStamp:\r
2439 return False\r
2440\r
2441 with open(self.TimeStampPath,'r') as f:\r
2442 for source in f:\r
2443 source = source.rstrip('\n')\r
2444 if not os.path.exists(source):\r
2445 return False\r
2446 if source not in ModuleAutoGen.TimeDict :\r
2447 ModuleAutoGen.TimeDict[source] = os.stat(source)[8]\r
2448 if ModuleAutoGen.TimeDict[source] > DstTimeStamp:\r
2449 return False\r
2450 GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)\r
2451 return True\r
2452\r
2453 @cached_property\r
2454 def TimeStampPath(self):\r
2455 return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')\r