]> git.proxmox.com Git - mirror_edk2.git/blob - BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
BaseTools: enhance the CacheCopyFile method arg names
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / ModuleAutoGen.py
1 ## @file
2 # Create makefile for MS nmake and GNU make
3 #
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
6 #
7 from __future__ import absolute_import
8 from AutoGen.AutoGen import AutoGen
9 from Common.LongFilePathSupport import CopyLongFilePath
10 from Common.BuildToolError import *
11 from Common.DataType import *
12 from Common.Misc import *
13 from Common.StringUtils import NormPath,GetSplitList
14 from collections import defaultdict
15 from Workspace.WorkspaceCommon import OrderedListDict
16 import os.path as path
17 import copy
18 import hashlib
19 from . import InfSectionParser
20 from . import GenC
21 from . import GenMake
22 from . import GenDepex
23 from io import BytesIO
24 from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile
25 from Workspace.MetaFileCommentParser import UsageList
26 from .GenPcdDb import CreatePcdDatabaseCode
27 from Common.caching import cached_class_function
28 from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo
29 from AutoGen.CacheIR import ModuleBuildCacheIR
30 import json
31 import tempfile
32
## Mapping Makefile type
#  MSFT toolchains drive nmake; the GCC family drives gmake.
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
#
# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
# is the former use /I , the Latter used -I to specify include directories
#
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

## default file name for AutoGen
#  %(module_name)s is substituted with the module base name at use sites.
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
# INF_VERSION written into generated as-built INF files.
gInfSpecVersion = "0x00010017"
51
#
# Match name = variable
#
# Raw string literals: the previous non-raw "\s"/"\w" escapes raise
# SyntaxWarning (and eventually SyntaxError) on modern Python.
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
#
# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
#
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
61
#
# Template string to generic AsBuilt INF
# ${BEGIN}/${END} pairs delimit sections repeated per substitution list entry.
#
gAsBuiltInfHeaderString = TemplateString("""${header_comments}

# DO NOT EDIT
# FILE auto-generated

[Defines]
  INF_VERSION                = ${module_inf_version}
  BASE_NAME                  = ${module_name}
  FILE_GUID                  = ${module_guid}
  MODULE_TYPE                = ${module_module_type}${BEGIN}
  VERSION_STRING             = ${module_version_string}${END}${BEGIN}
  PCD_IS_DRIVER              = ${pcd_is_driver_string}${END}${BEGIN}
  UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
  PI_SPECIFICATION_VERSION   = ${module_pi_specification_version}${END}${BEGIN}
  ENTRY_POINT                = ${module_entry_point}${END}${BEGIN}
  UNLOAD_IMAGE               = ${module_unload_image}${END}${BEGIN}
  CONSTRUCTOR                = ${module_constructor}${END}${BEGIN}
  DESTRUCTOR                 = ${module_destructor}${END}${BEGIN}
  SHADOW                     = ${module_shadow}${END}${BEGIN}
  PCI_VENDOR_ID              = ${module_pci_vendor_id}${END}${BEGIN}
  PCI_DEVICE_ID              = ${module_pci_device_id}${END}${BEGIN}
  PCI_CLASS_CODE             = ${module_pci_class_code}${END}${BEGIN}
  PCI_REVISION               = ${module_pci_revision}${END}${BEGIN}
  BUILD_NUMBER               = ${module_build_number}${END}${BEGIN}
  SPEC                       = ${module_spec}${END}${BEGIN}
  UEFI_HII_RESOURCE_SECTION  = ${module_uefi_hii_resource_section}${END}${BEGIN}
  MODULE_UNI_FILE            = ${module_uni_file}${END}

[Packages.${module_arch}]${BEGIN}
  ${package_item}${END}

[Binaries.${module_arch}]${BEGIN}
  ${binary_item}${END}

[PatchPcd.${module_arch}]${BEGIN}
  ${patchablepcd_item}
${END}

[Protocols.${module_arch}]${BEGIN}
  ${protocol_item}
${END}

[Ppis.${module_arch}]${BEGIN}
  ${ppi_item}
${END}

[Guids.${module_arch}]${BEGIN}
  ${guid_item}
${END}

[PcdEx.${module_arch}]${BEGIN}
  ${pcd_item}
${END}

[LibraryClasses.${module_arch}]
## @LIB_INSTANCES${BEGIN}
#  ${libraryclasses_item}${END}

${depexsection_item}

${userextension_tianocore_item}

${tail_comments}

[BuildOptions.${module_arch}]
## @AsBuilt${BEGIN}
##   ${flags_item}${END}
""")
#
# extend lists contained in a dictionary with lists stored in another dictionary
# if CopyToDict is not derived from DefaultDict(list) then this may raise exception
#
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    """Append every list in CopyFromDict onto the same-keyed list in CopyToDict."""
    for Key, Values in CopyFromDict.items():
        CopyToDict[Key].extend(Values)
140
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    """Join PathList into one path, create that directory, and return the path."""
    FullPath = path.join(*PathList)
    CreateDirectory(FullPath)
    return FullPath
146
147 #
148 # Convert string to C format array
149 #
150 def _ConvertStringToByteArray(Value):
151 Value = Value.strip()
152 if not Value:
153 return None
154 if Value[0] == '{':
155 if not Value.endswith('}'):
156 return None
157 Value = Value.replace(' ', '').replace('{', '').replace('}', '')
158 ValFields = Value.split(',')
159 try:
160 for Index in range(len(ValFields)):
161 ValFields[Index] = str(int(ValFields[Index], 0))
162 except ValueError:
163 return None
164 Value = '{' + ','.join(ValFields) + '}'
165 return Value
166
167 Unicode = False
168 if Value.startswith('L"'):
169 if not Value.endswith('"'):
170 return None
171 Value = Value[1:]
172 Unicode = True
173 elif not Value.startswith('"') or not Value.endswith('"'):
174 return None
175
176 Value = eval(Value) # translate escape character
177 NewValue = '{'
178 for Index in range(0, len(Value)):
179 if Unicode:
180 NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
181 else:
182 NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
183 Value = NewValue + '0}'
184 return Value
185
186 ## ModuleAutoGen class
187 #
188 # This class encapsules the AutoGen behaviors for the build tools. In addition to
189 # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
190 # to the [depex] section in module's inf file.
191 #
192 class ModuleAutoGen(AutoGen):
193 # call super().__init__ then call the worker function with different parameter count
194 def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
195 if not hasattr(self, "_Init"):
196 self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
197 self._Init = True
198
    ## Cache the timestamps of metafiles of every module in a class attribute
    #  (shared across all ModuleAutoGen instances)
    #
    TimeDict = {}
202
203 def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
204 # check if this module is employed by active platform
205 if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):
206 EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
207 % (MetaFile, Arch))
208 return None
209 return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
210
    ## Initialize ModuleAutoGen
    #
    # @param      Workspace           EdkIIWorkspaceBuild object
    # @param      ModuleFile          The path of module file
    # @param      Target              Build target (DEBUG, RELEASE)
    # @param      Toolchain           Name of tool chain
    # @param      Arch                The arch the module supports
    # @param      PlatformFile        Platform meta-file
    # @param      DataPipe            Data pipe carrying the platform info ("P_Info")
    #
    def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile, DataPipe):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)

        self.Workspace = Workspace
        self.WorkspaceDir = ""
        self.PlatformInfo = None
        self.DataPipe = DataPipe
        # Fills in WorkspaceDir and PlatformInfo from the data pipe.
        self.__init_platform_info__()
        self.MetaFile = ModuleFile
        self.SourceDir = self.MetaFile.SubDir
        # Keep SourceDir relative to the workspace root.
        self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)

        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily

        # Flags recording which generation steps have already been performed.
        self.IsCodeFileCreated = False
        self.IsAsBuiltInfCreated = False
        self.DepexGenerated = False

        self.BuildDatabase = self.Workspace.BuildDatabase
        self.BuildRuleOrder = None
        self.BuildTime = 0

        self._GuidComments = OrderedListDict()
        self._ProtocolComments = OrderedListDict()
        self._PpiComments = OrderedListDict()
        # Lazily populated by _ApplyBuildRule via the Targets property.
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None

        self.AutoGenDepSet = set()
        self.ReferenceModules = []
        self.ConstPcd = {}
        self.Makefile = None
        self.FileDependCache = {}
260
261 def __init_platform_info__(self):
262 pinfo = self.DataPipe.Get("P_Info")
263 self.WorkspaceDir = pinfo.get("WorkspaceDir")
264 self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)
    ## hash() operator of ModuleAutoGen
    #
    #  The module file path and arch string will be used to represent
    #  hash value of this object
    #
    #   @retval   int Hash value of the module file path and arch
    #
    @cached_property
    def __hash__(self):
        return hash((self.MetaFile, self.Arch))
    def __repr__(self):
        # e.g. "Path/To/Module.inf [X64]"
        return "%s [%s]" % (self.MetaFile, self.Arch)
277
278 # Get FixedAtBuild Pcds of this Module
279 @cached_property
280 def FixedAtBuildPcds(self):
281 RetVal = []
282 for Pcd in self.ModulePcdList:
283 if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:
284 continue
285 if Pcd not in RetVal:
286 RetVal.append(Pcd)
287 return RetVal
288
289 @cached_property
290 def FixedVoidTypePcds(self):
291 RetVal = {}
292 for Pcd in self.FixedAtBuildPcds:
293 if Pcd.DatumType == TAB_VOID:
294 if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:
295 RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue
296 return RetVal
297
298 @property
299 def UniqueBaseName(self):
300 ModuleNames = self.DataPipe.Get("M_Name")
301 if not ModuleNames:
302 return self.Name
303 return ModuleNames.get((self.Name,self.MetaFile),self.Name)
304
    # Macros could be used in build_rule.txt (also Makefile)
    @cached_property
    def Macros(self):
        """Macro name -> value mapping substituted into build_rule.txt and Makefile."""
        return OrderedDict((
            ("WORKSPACE", self.WorkspaceDir),
            ("MODULE_NAME", self.Name),
            ("MODULE_NAME_GUID", self.UniqueBaseName),
            ("MODULE_GUID", self.Guid),
            ("MODULE_VERSION", self.Version),
            ("MODULE_TYPE", self.ModuleType),
            ("MODULE_FILE", str(self.MetaFile)),
            ("MODULE_FILE_BASE_NAME", self.MetaFile.BaseName),
            ("MODULE_RELATIVE_DIR", self.SourceDir),
            ("MODULE_DIR", self.SourceDir),
            ("BASE_NAME", self.Name),
            ("ARCH", self.Arch),
            # TOOLCHAIN/TOOLCHAIN_TAG/TOOL_CHAIN_TAG are aliases of the same value.
            ("TOOLCHAIN", self.ToolChain),
            ("TOOLCHAIN_TAG", self.ToolChain),
            ("TOOL_CHAIN_TAG", self.ToolChain),
            ("TARGET", self.BuildTarget),
            ("BUILD_DIR", self.PlatformInfo.BuildDir),
            ("BIN_DIR", os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("LIB_DIR", os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("MODULE_BUILD_DIR", self.BuildDir),
            ("OUTPUT_DIR", self.OutputDir),
            ("DEBUG_DIR", self.DebugDir),
            ("DEST_DIR_OUTPUT", self.OutputDir),
            ("DEST_DIR_DEBUG", self.DebugDir),
            ("PLATFORM_NAME", self.PlatformInfo.Name),
            ("PLATFORM_GUID", self.PlatformInfo.Guid),
            ("PLATFORM_VERSION", self.PlatformInfo.Version),
            ("PLATFORM_RELATIVE_DIR", self.PlatformInfo.SourceDir),
            ("PLATFORM_DIR", mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
            ("PLATFORM_OUTPUT_DIR", self.PlatformInfo.OutputDir),
            ("FFS_OUTPUT_DIR", self.FfsOutputDir)
        ))
341
    ## Return the module build data object
    @cached_property
    def Module(self):
        # Workspace build database is keyed by (metafile, arch, target, toolchain).
        return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]

    ## Return the module name
    @cached_property
    def Name(self):
        return self.Module.BaseName

    ## Return the module DxsFile if exist
    @cached_property
    def DxsFile(self):
        return self.Module.DxsFile

    ## Return the module meta-file GUID
    @cached_property
    def Guid(self):
        #
        # To build same module more than once, the module path with FILE_GUID overridden has
        # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
        # in DSC. The overridden GUID can be retrieved from file name
        #
        if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
            #
            # Length of GUID is 36
            #
            return os.path.basename(self.MetaFile.Path)[:36]
        return self.Module.Guid

    ## Return the module version
    @cached_property
    def Version(self):
        return self.Module.Version

    ## Return the module type
    @cached_property
    def ModuleType(self):
        return self.Module.ModuleType

    ## Return the component type (for Edk.x style of module)
    @cached_property
    def ComponentType(self):
        return self.Module.ComponentType

    ## Return the build type
    @cached_property
    def BuildType(self):
        return self.Module.BuildType

    ## Return the PCD_IS_DRIVER setting
    @cached_property
    def PcdIsDriver(self):
        return self.Module.PcdIsDriver

    ## Return the autogen version, i.e. module meta-file version
    @cached_property
    def AutoGenVersion(self):
        return self.Module.AutoGenVersion

    ## Check if the module is library or not
    @cached_property
    def IsLibrary(self):
        # A module is a library when its INF declares a LIBRARY_CLASS.
        return bool(self.Module.LibraryClass)

    ## Check if the module is binary module or not
    @cached_property
    def IsBinaryModule(self):
        return self.Module.IsBinaryModule
411
    ## Return the directory to store intermediate files of the module
    @cached_property
    def BuildDir(self):
        # $(BUILD_DIR)/$(ARCH)/<module source dir>/<INF base name>; the
        # directory is created on first access.
        return _MakeDir((
            self.PlatformInfo.BuildDir,
            self.Arch,
            self.SourceDir,
            self.MetaFile.BaseName
        ))

    ## Return the directory to store the intermediate object files of the module
    @cached_property
    def OutputDir(self):
        return _MakeDir((self.BuildDir, "OUTPUT"))

    ## Return the directory path to store ffs file
    @cached_property
    def FfsOutputDir(self):
        # Only meaningful when an FDF file drives the build; empty otherwise.
        if GlobalData.gFdfParser:
            return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
        return ''

    ## Return the directory to store auto-gened source files of the module
    @cached_property
    def DebugDir(self):
        return _MakeDir((self.BuildDir, "DEBUG"))
438
439 ## Return the path of custom file
440 @cached_property
441 def CustomMakefile(self):
442 RetVal = {}
443 for Type in self.Module.CustomMakefile:
444 MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'
445 File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
446 RetVal[MakeType] = File
447 return RetVal
448
    ## Return the directory of the makefile
    #
    #   @retval     string  The directory string of module's makefile
    #
    @cached_property
    def MakeFileDir(self):
        # The makefile lives directly in the module build directory.
        return self.BuildDir

    ## Return build command string
    #
    #   @retval     string  Build command string
    #
    @cached_property
    def BuildCommand(self):
        # Delegated to the platform: make tool plus its standard arguments.
        return self.PlatformInfo.BuildCommand
464
465 ## Get Module package and Platform package
466 #
467 # @retval list The list of package object
468 #
469 @cached_property
470 def PackageList(self):
471 PkagList = []
472 if self.Module.Packages:
473 PkagList.extend(self.Module.Packages)
474 Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
475 for Package in Platform.Packages:
476 if Package in PkagList:
477 continue
478 PkagList.append(Package)
479 return PkagList
480
481 ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on
482 #
483 # @retval list The list of package object
484 #
485 @cached_property
486 def DerivedPackageList(self):
487 PackageList = []
488 PackageList.extend(self.PackageList)
489 for M in self.DependentLibraryList:
490 for Package in M.Packages:
491 if Package in PackageList:
492 continue
493 PackageList.append(Package)
494 return PackageList
495
    ## Get the depex string
    #
    # @return : a string contain all depex expression.
    def _GetDepexExpresionString(self):
        """Collect [Depex] sections from this module and its dependent libraries
        and merge them into a single section string for the as-built INF."""
        DepexStr = ''
        DepexList = []
        ## DPX_SOURCE IN Define section.
        if self.Module.DxsFile:
            # The depex comes from the DXS file instead; nothing to merge.
            return DepexStr
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            DepexExpressionList = InfObj.GetDepexExpresionList()
            for DepexExpression in DepexExpressionList:
                for key in DepexExpression:
                    Arch, ModuleType = key
                    # Drop comment lines from the expression token list.
                    DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
                    # the type of build module is USER_DEFINED.
                    # All different DEPEX section tags would be copied into the As Built INF file
                    # and there would be separate DEPEX section tags
                    if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
                        if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
                    else:
                        # Keep sections that match this arch, either common or
                        # matching this module's type.
                        if Arch.upper() == TAB_ARCH_COMMON or \
                                (Arch.upper() == self.Arch.upper() and \
                                ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
                            DepexList.append({(Arch, ModuleType): DepexExpr})

        #the type of build module is USER_DEFINED.
        if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
            # Emit each section verbatim (commented) under its own tag.
            for Depex in DepexList:
                for key in Depex:
                    DepexStr += '[Depex.%s.%s]\n' % key
                    DepexStr += '\n'.join('# '+ val for val in Depex[key])
                    DepexStr += '\n\n'
            if not DepexStr:
                return '[Depex.%s]\n' % self.Arch
            return DepexStr

        #the type of build module not is USER_DEFINED.
        Count = 0
        for Depex in DepexList:
            Count += 1
            if DepexStr != '':
                DepexStr += ' AND '
            DepexStr += '('
            for D in Depex.values():
                DepexStr += ' '.join(val for val in D)
            # Strip a trailing END token; it is implied when combining sections.
            Index = DepexStr.find('END')
            if Index > -1 and Index == len(DepexStr) - 3:
                DepexStr = DepexStr[:-3]
            DepexStr = DepexStr.strip()
            DepexStr += ')'
        if Count == 1:
            # A single expression needs no wrapping parentheses.
            DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
        if not DepexStr:
            return '[Depex.%s]\n' % self.Arch
        return '[Depex.%s]\n# ' % self.Arch + DepexStr
555
    ## Merge dependency expression
    #
    #   @retval     list    The token list of the dependency expression after parsed
    #
    @cached_property
    def DepexList(self):
        # Libraries, DXS-file modules, and modules shipping a prebuilt .depex
        # binary do not get a generated depex.
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexList = []
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
        #
        FixedVoidTypePcds = {}
        for M in [self] + self.LibraryAutoGenList:
            FixedVoidTypePcds.update(M.FixedVoidTypePcds)
        for M in [self] + self.LibraryAutoGenList:
            Inherited = False
            for D in M.Module.Depex[self.Arch, self.ModuleType]:
                if DepexList != []:
                    DepexList.append('AND')
                DepexList.append('(')
                #replace D with value if D is FixedAtBuild PCD
                NewList = []
                for item in D:
                    if '.' not in item:
                        NewList.append(item)
                    else:
                        # A dotted token is a Guid.TokenName PCD reference; it
                        # must resolve to a 16-byte VOID* FixedAtBuild value.
                        try:
                            Value = FixedVoidTypePcds[item]
                            if len(Value.split(',')) != 16:
                                EdkLogger.error("build", FORMAT_INVALID,
                                                "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
                            NewList.append(Value)
                        except:
                            EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))

                DepexList.extend(NewList)
                if DepexList[-1] == 'END':  # no need of a END at this time
                    DepexList.pop()
                DepexList.append(')')
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
            # BEFORE/AFTER expressions stand alone; stop merging further depex.
            if 'BEFORE' in DepexList or 'AFTER' in DepexList:
                break
        if len(DepexList) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexList}
605
    ## Merge dependency expression
    #
    #   @retval     list    The token list of the dependency expression after parsed
    #
    @cached_property
    def DepexExpressionDict(self):
        # Same exclusions as DepexList: no generated depex for libraries,
        # DXS-file modules, or modules with a prebuilt .depex binary.
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexExpressionString = ''
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.DepexExpression[self.Arch, self.ModuleType]:
                if DepexExpressionString != '':
                    DepexExpressionString += ' AND '
                DepexExpressionString += '('
                DepexExpressionString += D
                # A trailing END is implied when combining expressions.
                DepexExpressionString = DepexExpressionString.rstrip('END').strip()
                DepexExpressionString += ')'
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
            # BEFORE/AFTER expressions stand alone; stop merging further depex.
            if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
                break
        if len(DepexExpressionString) > 0:
            EdkLogger.verbose('')

        return {self.ModuleType:DepexExpressionString}
637
    # Get the tiano core user extension, it is contain dependent library.
    # @retval: a list contain tiano core userextension.
    #
    def _GetTianoCoreUserExtensionList(self):
        """Collect TianoCore [UserExtensions] sections from this module and its
        dependent libraries that apply to this arch."""
        TianoCoreUserExtentionList = []
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
            for TianoCoreUserExtent in TianoCoreUserExtenList:
                for Section in TianoCoreUserExtent:
                    ItemList = Section.split(TAB_SPLIT)
                    Arch = self.Arch
                    # A 4-element section tag carries an explicit arch qualifier.
                    if len(ItemList) == 4:
                        Arch = ItemList[3]
                    if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
                        TianoCoreList = []
                        # Reproduce the section header, then its body verbatim.
                        TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
                        TianoCoreList.extend(TianoCoreUserExtent[Section][:])
                        TianoCoreList.append('\n')
                        TianoCoreUserExtentionList.append(TianoCoreList)

        return TianoCoreUserExtentionList
661
    ## Return the list of specification version required for the module
    #
    #   @retval     list    The list of specification defined in module file
    #
    @cached_property
    def Specification(self):
        # Straight delegation to the module build-data object.
        return self.Module.Specification
669
670 ## Tool option for the module build
671 #
672 # @param PlatformInfo The object of PlatformBuildInfo
673 # @retval dict The dict containing valid options
674 #
675 @cached_property
676 def BuildOption(self):
677 RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
678 if self.BuildRuleOrder:
679 self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
680 return RetVal
681
682 ## Get include path list from tool option for the module build
683 #
684 # @retval list The include path list
685 #
686 @cached_property
687 def BuildOptionIncPathList(self):
688 #
689 # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
690 # is the former use /I , the Latter used -I to specify include directories
691 #
692 if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):
693 BuildOptIncludeRegEx = gBuildOptIncludePatternMsft
694 elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
695 BuildOptIncludeRegEx = gBuildOptIncludePatternOther
696 else:
697 #
698 # New ToolChainFamily, don't known whether there is option to specify include directories
699 #
700 return []
701
702 RetVal = []
703 for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
704 try:
705 FlagOption = self.BuildOption[Tool]['FLAGS']
706 except KeyError:
707 FlagOption = ''
708
709 if self.ToolChainFamily != 'RVCT':
710 IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
711 else:
712 #
713 # RVCT may specify a list of directory seperated by commas
714 #
715 IncPathList = []
716 for Path in BuildOptIncludeRegEx.findall(FlagOption):
717 PathList = GetSplitList(Path, TAB_COMMA_SPLIT)
718 IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)
719
720 #
721 # EDK II modules must not reference header files outside of the packages they depend on or
722 # within the module's directory tree. Report error if violation.
723 #
724 if GlobalData.gDisableIncludePathCheck == False:
725 for Path in IncPathList:
726 if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
727 ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
728 EdkLogger.error("build",
729 PARAMETER_INVALID,
730 ExtraData=ErrMsg,
731 File=str(self.MetaFile))
732 RetVal += IncPathList
733 return RetVal
734
    ## Return a list of files which can be built from source
    #
    #  What kind of files can be built is determined by build rules in
    #  $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
    #
    @cached_property
    def SourceFileList(self):
        RetVal = []
        # A source line matches when its tag/family is empty, '*', or this build's.
        ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
        ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
        for F in self.Module.Sources:
            # match tool chain
            if F.TagName not in ToolChainTagSet:
                EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
                continue
            # match tool chain family or build rule family
            if F.ToolChainFamily not in ToolChainFamilySet:
                EdkLogger.debug(
                    EdkLogger.DEBUG_0,
                    "The file [%s] must be built by tools of [%s], " \
                    "but current toolchain family is [%s], buildrule family is [%s]" \
                    % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
                continue

            # add the file path into search path list for file including
            # (side effect: mutates self.IncludePathList, front-inserted so the
            # file's own directory wins)
            if F.Dir not in self.IncludePathList:
                self.IncludePathList.insert(0, F.Dir)
            RetVal.append(F)

        # Drop lower-priority duplicates per BuildRuleOrder before applying rules.
        self._MatchBuildRuleOrder(RetVal)

        for F in RetVal:
            self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return RetVal
770
    def _MatchBuildRuleOrder(self, FileList):
        """When several sources share a base name and differ only by extension,
        keep only the highest-priority extension per BuildRuleOrder and remove
        the rest from FileList in place. Returns the (mutated) FileList."""
        Order_Dict = {}
        # Evaluated for its side effect: populates self.BuildRuleOrder.
        self.BuildOption
        for SingleFile in FileList:
            if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
                # Group candidate extensions by path-without-extension.
                key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]
                if key in Order_Dict:
                    Order_Dict[key].append(SingleFile.Ext)
                else:
                    Order_Dict[key] = [SingleFile.Ext]

        RemoveList = []
        for F in Order_Dict:
            if len(Order_Dict[F]) > 1:
                # Lower index in BuildRuleOrder = higher priority; keep [0].
                Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
                for Ext in Order_Dict[F][1:]:
                    RemoveList.append(F + Ext)

        for item in RemoveList:
            # item is a path string; relies on the file objects comparing
            # equal to their path string.
            FileList.remove(item)

        return FileList
793
    ## Return the list of unicode files
    @cached_property
    def UnicodeFileList(self):
        return self.FileTypes.get(TAB_UNICODE_FILE,[])

    ## Return the list of vfr files
    @cached_property
    def VfrFileList(self):
        return self.FileTypes.get(TAB_VFR_FILE, [])

    ## Return the list of Image Definition files
    @cached_property
    def IdfFileList(self):
        return self.FileTypes.get(TAB_IMAGE_FILE,[])
808
    ## Return a list of files which can be built from binary
    #
    #  "Build" binary files are just to copy them to build directory.
    #
    #   @retval     list        The list of files which can be built later
    #
    @cached_property
    def BinaryFileList(self):
        RetVal = []
        for F in self.Module.Binaries:
            # Skip binaries declared for a different build target.
            if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:
                continue
            # Append BEFORE applying the rule: _ApplyBuildRule consults the
            # partially built RetVal via its BinaryFileList parameter.
            RetVal.append(F)
            self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
        return RetVal
824
    @cached_property
    def BuildRules(self):
        """Map file type AND source extension -> instantiated build-rule object.

        Lookup order per file type: (BuildType, BuildRuleFamily), then
        (ModuleType, BuildRuleFamily), then the same two keyed by
        ToolChainFamily. The first hit wins.
        """
        RetVal = {}
        BuildRuleDatabase = self.PlatformInfo.BuildRule
        for Type in BuildRuleDatabase.FileTypeList:
            #first try getting build rule by BuildRuleFamily
            RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
            if not RuleObject:
                # build type is always module type, but ...
                if self.ModuleType != self.BuildType:
                    RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
            #second try getting build rule by ToolChainFamily
            if not RuleObject:
                RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
                if not RuleObject:
                    # build type is always module type, but ...
                    if self.ModuleType != self.BuildType:
                        RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
            if not RuleObject:
                continue
            # Expand macros once, then index by both file type and extensions.
            RuleObject = RuleObject.Instantiate(self.Macros)
            RetVal[Type] = RuleObject
            for Ext in RuleObject.SourceFileExtList:
                RetVal[Ext] = RuleObject
        return RetVal
850
    def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
        """Chain build rules starting from File, recording intermediate and
        final build targets in self._BuildTargets / _IntroBuildTargetList /
        _FinalBuildTargetList and the file-type index in self._FileTypes.

        @param  File            Source or binary file to start the chain from
        @param  FileType        Initial file type used to pick the first rule
        @param  BinaryFileList  Override for self.BinaryFileList (used while
                                that property is still being built)
        """
        # Lazy-init of the accumulators shared with the Targets property.
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        if not BinaryFileList:
            BinaryFileList = self.BinaryFileList

        SubDirectory = os.path.join(self.OutputDir, File.SubDir)
        if not os.path.exists(SubDirectory):
            CreateDirectory(SubDirectory)
        LastTarget = None
        RuleChain = set()
        SourceList = [File]
        Index = 0
        #
        # Make sure to get build rule order value
        # (evaluated for its side effect of setting self.BuildRuleOrder)
        #
        self.BuildOption

        # SourceList grows as each rule's outputs are appended; iterate until
        # no rule applies.
        while Index < len(SourceList):
            Source = SourceList[Index]
            Index = Index + 1

            if Source != File:
                CreateDirectory(Source.Dir)

            if File.IsBinary and File == Source and File in BinaryFileList:
                # Skip all files that are not binary libraries
                if not self.IsLibrary:
                    continue
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            elif FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            else:
                # stop at no more rules
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            FileType = RuleObject.SourceFileType
            self._FileTypes[FileType].add(Source)

            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            Target = RuleObject.Apply(Source, self.BuildRuleOrder)
            if not Target:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break
            elif not Target.Outputs:
                # Only do build for target with outputs
                self._FinalBuildTargetList.add(Target)

            self._BuildTargets[FileType].add(Target)

            if not Source.IsBinary and Source == File:
                # The very first rule applied to a source file is its
                # "intro" target.
                self._IntroBuildTargetList.add(Target)

            # to avoid cyclic rule
            if FileType in RuleChain:
                break

            RuleChain.add(FileType)
            SourceList.extend(Target.Outputs)
            LastTarget = Target
            # Subsequent hops are matched by extension, not by file type.
            FileType = TAB_UNKNOWN_FILE
926
    @cached_property
    def Targets(self):
        """File type -> set of build targets; built by applying build rules to
        all source and binary files (via their list properties)."""
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        #TRICK: call SourceFileList property to apply build rule for source files
        self.SourceFileList

        #TRICK: call _GetBinaryFileList to apply build rule for binary files
        self.BinaryFileList

        return self._BuildTargets

    @cached_property
    def IntroTargetList(self):
        # Evaluated for its side effect of populating the target lists.
        self.Targets
        return self._IntroBuildTargetList

    @cached_property
    def CodaTargetList(self):
        # Evaluated for its side effect of populating the target lists.
        self.Targets
        return self._FinalBuildTargetList

    @cached_property
    def FileTypes(self):
        # Evaluated for its side effect of populating the file-type index.
        self.Targets
        return self._FileTypes
957
958 ## Get the list of package object the module depends on and the Platform depends on
959 #
960 # @retval list The package object list
961 #
962 @cached_property
963 def DependentPackageList(self):
964 return self.PackageList
965
966 ## Return the list of auto-generated code file
967 #
968 # @retval list The list of auto-generated file
969 #
970 @cached_property
971 def AutoGenFileList(self):
972 AutoGenUniIdf = self.BuildType != 'UEFI_HII'
973 UniStringBinBuffer = BytesIO()
974 IdfGenBinBuffer = BytesIO()
975 RetVal = {}
976 AutoGenC = TemplateString()
977 AutoGenH = TemplateString()
978 StringH = TemplateString()
979 StringIdf = TemplateString()
980 GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
981 #
982 # AutoGen.c is generated if there are library classes in inf, or there are object files
983 #
984 if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
985 or TAB_OBJECT_FILE in self.FileTypes):
986 AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
987 RetVal[AutoFile] = str(AutoGenC)
988 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
989 if str(AutoGenH) != "":
990 AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
991 RetVal[AutoFile] = str(AutoGenH)
992 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
993 if str(StringH) != "":
994 AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
995 RetVal[AutoFile] = str(StringH)
996 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
997 if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
998 AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
999 RetVal[AutoFile] = UniStringBinBuffer.getvalue()
1000 AutoFile.IsBinary = True
1001 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
1002 if UniStringBinBuffer is not None:
1003 UniStringBinBuffer.close()
1004 if str(StringIdf) != "":
1005 AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
1006 RetVal[AutoFile] = str(StringIdf)
1007 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
1008 if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
1009 AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
1010 RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
1011 AutoFile.IsBinary = True
1012 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
1013 if IdfGenBinBuffer is not None:
1014 IdfGenBinBuffer.close()
1015 return RetVal
1016
1017 ## Return the list of library modules explicitly or implicitly used by this module
1018 @cached_property
1019 def DependentLibraryList(self):
1020 # only merge library classes and PCD for non-library module
1021 if self.IsLibrary:
1022 return []
1023 return self.PlatformInfo.ApplyLibraryInstance(self.Module)
1024
1025 ## Get the list of PCDs from current module
1026 #
1027 # @retval list The list of PCD
1028 #
1029 @cached_property
1030 def ModulePcdList(self):
1031 # apply PCD settings from platform
1032 RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
1033
1034 return RetVal
1035 @cached_property
1036 def _PcdComments(self):
1037 ReVal = OrderedListDict()
1038 ExtendCopyDictionaryLists(ReVal, self.Module.PcdComments)
1039 if not self.IsLibrary:
1040 for Library in self.DependentLibraryList:
1041 ExtendCopyDictionaryLists(ReVal, Library.PcdComments)
1042 return ReVal
1043
1044 ## Get the list of PCDs from dependent libraries
1045 #
1046 # @retval list The list of PCD
1047 #
1048 @cached_property
1049 def LibraryPcdList(self):
1050 if self.IsLibrary:
1051 return []
1052 RetVal = []
1053 Pcds = set()
1054 # get PCDs from dependent libraries
1055 for Library in self.DependentLibraryList:
1056 PcdsInLibrary = OrderedDict()
1057 for Key in Library.Pcds:
1058 # skip duplicated PCDs
1059 if Key in self.Module.Pcds or Key in Pcds:
1060 continue
1061 Pcds.add(Key)
1062 PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
1063 RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))
1064 return RetVal
1065
1066 ## Get the GUID value mapping
1067 #
1068 # @retval dict The mapping between GUID cname and its value
1069 #
1070 @cached_property
1071 def GuidList(self):
1072 RetVal = self.Module.Guids
1073 for Library in self.DependentLibraryList:
1074 RetVal.update(Library.Guids)
1075 ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)
1076 ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)
1077 return RetVal
1078
1079 @cached_property
1080 def GetGuidsUsedByPcd(self):
1081 RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())
1082 for Library in self.DependentLibraryList:
1083 RetVal.update(Library.GetGuidsUsedByPcd())
1084 return RetVal
1085 ## Get the protocol value mapping
1086 #
1087 # @retval dict The mapping between protocol cname and its value
1088 #
1089 @cached_property
1090 def ProtocolList(self):
1091 RetVal = OrderedDict(self.Module.Protocols)
1092 for Library in self.DependentLibraryList:
1093 RetVal.update(Library.Protocols)
1094 ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)
1095 ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)
1096 return RetVal
1097
1098 ## Get the PPI value mapping
1099 #
1100 # @retval dict The mapping between PPI cname and its value
1101 #
1102 @cached_property
1103 def PpiList(self):
1104 RetVal = OrderedDict(self.Module.Ppis)
1105 for Library in self.DependentLibraryList:
1106 RetVal.update(Library.Ppis)
1107 ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)
1108 ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)
1109 return RetVal
1110
1111 ## Get the list of include search path
1112 #
1113 # @retval list The list path
1114 #
1115 @cached_property
1116 def IncludePathList(self):
1117 RetVal = []
1118 RetVal.append(self.MetaFile.Dir)
1119 RetVal.append(self.DebugDir)
1120
1121 for Package in self.PackageList:
1122 PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
1123 if PackageDir not in RetVal:
1124 RetVal.append(PackageDir)
1125 IncludesList = Package.Includes
1126 if Package._PrivateIncludes:
1127 if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
1128 IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
1129 for Inc in IncludesList:
1130 if Inc not in RetVal:
1131 RetVal.append(str(Inc))
1132 RetVal.extend(self.IncPathFromBuildOptions)
1133 return RetVal
1134
    @cached_property
    def IncPathFromBuildOptions(self):
        # Extract extra include search paths from the FLAGS string of every
        # tool in the build options.  Both MSFT-style "/I path" and GCC-style
        # "-I path" are recognized, with or without whitespace between the
        # switch and the path; only paths that exist on disk are kept.
        IncPathList = []
        for tool in self.BuildOption:
            if 'FLAGS' in self.BuildOption[tool]:
                flags = self.BuildOption[tool]['FLAGS']
                whitespace = False
                for flag in flags.split(" "):
                    flag = flag.strip()
                    if flag.startswith(("/I","-I")):
                        if len(flag)>2:
                            # path attached to the switch, e.g. "/Ipath"
                            if os.path.exists(flag[2:]):
                                IncPathList.append(flag[2:])
                        else:
                            # bare "/I" or "-I": the path is the next token
                            whitespace = True
                            continue
                    if whitespace and flag:
                        # this token is the path that followed a bare switch
                        if os.path.exists(flag):
                            IncPathList.append(flag)
                        whitespace = False
        return IncPathList
1156
1157 @cached_property
1158 def IncludePathLength(self):
1159 return sum(len(inc)+1 for inc in self.IncludePathList)
1160
    ## Get the list of include paths from the packages
    #
    # @IncludesList list The list path
    #
    @cached_property
    def PackageIncludePathList(self):
        # NOTE(review): IncludesList is reassigned on every loop iteration, so
        # only the includes of the LAST package in PackageList are returned.
        # This preserves the historical behavior; confirm whether accumulation
        # across all packages was intended before changing it.
        IncludesList = []
        for Package in self.PackageList:
            PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
            IncludesList = Package.Includes
            if Package._PrivateIncludes:
                # private includes are dropped when this module lives outside the package
                if not self.MetaFile.Path.startswith(PackageDir):
                    IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
        return IncludesList
1175
    ## Get HII EX PCDs which maybe used by VFR
    #
    #  efivarstore used by VFR may relate with HII EX PCDs
    #  Get the variable name and GUID from efivarstore and HII EX PCD
    #  List the HII EX PCDs in As Built INF if both name and GUID match.
    #
    #  @retval list HII EX PCDs
    #
    def _GetPcdsMaybeUsedByVfr(self):
        if not self.SourceFileList:
            return []

        # Collect (variable name, GUID) pairs from every preprocessed VFR file
        # (<BaseName>.i) found in the output directory.
        NameGuids = set()
        for SrcFile in self.SourceFileList:
            if SrcFile.Ext.lower() != '.vfr':
                continue
            Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
            if not os.path.exists(Vfri):
                continue
            VfriFile = open(Vfri, 'r')
            Content = VfriFile.read()
            VfriFile.close()
            Pos = Content.find('efivarstore')
            while Pos != -1:
                #
                # Make sure 'efivarstore' is the start of efivarstore statement
                # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
                #
                Index = Pos - 1
                while Index >= 0 and Content[Index] in ' \t\r\n':
                    Index -= 1
                if Index >= 0 and Content[Index] != ';':
                    # not a statement start; search for the next occurrence
                    Pos = Content.find('efivarstore', Pos + len('efivarstore'))
                    continue
                #
                # 'efivarstore' must be followed by name and guid
                #
                Name = gEfiVarStoreNamePattern.search(Content, Pos)
                if not Name:
                    break
                Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
                if not Guid:
                    break
                NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')
                NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
                Pos = Content.find('efivarstore', Name.end())
        if not NameGuids:
            return []
        # Match collected pairs against the platform's DynamicExHii PCDs; a
        # PCD is listed when both variable name and GUID match.
        HiiExPcds = []
        for Pcd in self.PlatformInfo.Pcds.values():
            if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
                continue
            for SkuInfo in Pcd.SkuInfoList.values():
                Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
                if not Value:
                    continue
                Name = _ConvertStringToByteArray(SkuInfo.VariableName)
                Guid = GuidStructureStringToGuidString(Value)
                if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
                    HiiExPcds.append(Pcd)
                    break

        return HiiExPcds
1239
1240 def _GenOffsetBin(self):
1241 VfrUniBaseName = {}
1242 for SourceFile in self.Module.Sources:
1243 if SourceFile.Type.upper() == ".VFR" :
1244 #
1245 # search the .map file to find the offset of vfr binary in the PE32+/TE file.
1246 #
1247 VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
1248 elif SourceFile.Type.upper() == ".UNI" :
1249 #
1250 # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
1251 #
1252 VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")
1253
1254 if not VfrUniBaseName:
1255 return None
1256 MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
1257 EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
1258 VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
1259 if not VfrUniOffsetList:
1260 return None
1261
1262 OutputName = '%sOffset.bin' % self.Name
1263 UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)
1264
1265 try:
1266 fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
1267 except:
1268 EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
1269
1270 # Use a instance of BytesIO to cache data
1271 fStringIO = BytesIO()
1272
1273 for Item in VfrUniOffsetList:
1274 if (Item[0].find("Strings") != -1):
1275 #
1276 # UNI offset in image.
1277 # GUID + Offset
1278 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
1279 #
1280 UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
1281 fStringIO.write(UniGuid)
1282 UniValue = pack ('Q', int (Item[1], 16))
1283 fStringIO.write (UniValue)
1284 else:
1285 #
1286 # VFR binary offset in image.
1287 # GUID + Offset
1288 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
1289 #
1290 VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
1291 fStringIO.write(VfrGuid)
1292 VfrValue = pack ('Q', int (Item[1], 16))
1293 fStringIO.write (VfrValue)
1294 #
1295 # write data into file.
1296 #
1297 try :
1298 fInputfile.write (fStringIO.getvalue())
1299 except:
1300 EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
1301 "file been locked or using by other applications." %UniVfrOffsetFileName, None)
1302
1303 fStringIO.close ()
1304 fInputfile.close ()
1305 return OutputName
1306
1307 @cached_property
1308 def OutputFile(self):
1309 retVal = set()
1310
1311 for Root, Dirs, Files in os.walk(self.BuildDir):
1312 for File in Files:
1313 # lib file is already added through above CodaTargetList, skip it here
1314 if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):
1315 NewFile = path.join(Root, File)
1316 retVal.add(NewFile)
1317
1318 for Root, Dirs, Files in os.walk(self.FfsOutputDir):
1319 for File in Files:
1320 NewFile = path.join(Root, File)
1321 retVal.add(NewFile)
1322
1323 return retVal
1324
    ## Create AsBuilt INF file the module
    #
    # Emits <ModuleName>.inf in the output directory describing the module
    # "as built": its binaries, patchable and DynamicEx PCDs, protocols,
    # PPIs, GUIDs, build flags and library classes.  Only runs for source
    # modules that produced binaries; libraries and binary-only modules are
    # skipped.
    #
    def CreateAsBuiltInf(self):

        if self.IsAsBuiltInfCreated:
            return

        # Skip INF file generation for libraries
        if self.IsLibrary:
            return

        # Skip the following code for modules with no source files
        if not self.SourceFileList:
            return

        # Skip the following code for modules without any binary files
        if self.BinaryFileList:
            return

        ### TODO: How to handle mixed source and binary modules

        # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
        # Also find all packages that the DynamicEx PCDs depend on
        Pcds = []
        PatchablePcds = []
        Packages = []
        PcdCheckList = []
        PcdTokenSpaceList = []
        for Pcd in self.ModulePcdList + self.LibraryPcdList:
            if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
                PatchablePcds.append(Pcd)
                PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
            elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
                if Pcd not in Pcds:
                    Pcds.append(Pcd)
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
                    PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
        GuidList = OrderedDict(self.GuidList)
        for TokenSpace in self.GetGuidsUsedByPcd:
            # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
            # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
            if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
                GuidList.pop(TokenSpace)
        CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
        # A package is listed when any of its GUIDs/PPIs/protocols/PCDs is used
        for Package in self.DerivedPackageList:
            if Package in Packages:
                continue
            BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
            Found = False
            for Index in range(len(BeChecked)):
                for Item in CheckList[Index]:
                    if Item in BeChecked[Index]:
                        Packages.append(Package)
                        Found = True
                        break
                if Found:
                    break

        # Packages providing HII EX PCDs referenced from VFR must be listed too
        VfrPcds = self._GetPcdsMaybeUsedByVfr()
        for Pkg in self.PlatformInfo.PackageList:
            if Pkg in Packages:
                continue
            for VfrPcd in VfrPcds:
                if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
                    (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
                    Packages.append(Pkg)
                    break

        # A UEFI driver with a generated depex is reported as a DXE driver
        ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
        DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
        Guid = self.Guid
        MDefs = self.Module.Defines

        # Substitution dictionary for the gAsBuiltInfHeaderString template
        AsBuiltInfDict = {
          'module_name' : self.Name,
          'module_guid' : Guid,
          'module_module_type' : ModuleType,
          'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
          'pcd_is_driver_string' : [],
          'module_uefi_specification_version' : [],
          'module_pi_specification_version' : [],
          'module_entry_point' : self.Module.ModuleEntryPointList,
          'module_unload_image' : self.Module.ModuleUnloadImageList,
          'module_constructor' : self.Module.ConstructorList,
          'module_destructor' : self.Module.DestructorList,
          'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
          'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
          'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
          'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
          'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
          'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
          'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
          'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
          'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
          'module_arch' : self.Arch,
          'package_item' : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
          'binary_item' : [],
          'patchablepcd_item' : [],
          'pcd_item' : [],
          'protocol_item' : [],
          'ppi_item' : [],
          'guid_item' : [],
          'flags_item' : [],
          'libraryclasses_item' : []
        }

        # Ship the module UNI file alongside the as-built INF if one is named
        if 'MODULE_UNI_FILE' in MDefs:
            UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
            if os.path.isfile(UNIFile):
                shutil.copy2(UNIFile, self.OutputDir)

        if self.AutoGenVersion > int(gInfSpecVersion, 0):
            AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
        else:
            AsBuiltInfDict['module_inf_version'] = gInfSpecVersion

        if DriverType:
            AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)

        if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
        if 'PI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])

        # Record every final build target as a binary item, typed by extension
        OutputDir = self.OutputDir.replace('\\', '/').strip('/')
        DebugDir = self.DebugDir.replace('\\', '/').strip('/')
        for Item in self.CodaTargetList:
            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
            if os.path.isabs(File):
                File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
            if Item.Target.Ext.lower() == '.aml':
                AsBuiltInfDict['binary_item'].append('ASL|' + File)
            elif Item.Target.Ext.lower() == '.acpi':
                AsBuiltInfDict['binary_item'].append('ACPI|' + File)
            elif Item.Target.Ext.lower() == '.efi':
                AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
            else:
                AsBuiltInfDict['binary_item'].append('BIN|' + File)
        # Detect a depex file produced outside the normal flow
        if not self.DepexGenerated:
            DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
            if os.path.exists(DepexFile):
                self.DepexGenerated = True
        if self.DepexGenerated:
            if self.ModuleType in [SUP_MODULE_PEIM]:
                AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
                AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
                AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')

        Bin = self._GenOffsetBin()
        if Bin:
            AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)

        # PDB files are listed as disposable binaries
        for Root, Dirs, Files in os.walk(OutputDir):
            for File in Files:
                if File.lower().endswith('.pdb'):
                    AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
        # Reuse the source header comments from the @BinaryHeader tag onward
        HeaderComments = self.Module.HeaderComments
        StartPos = 0
        for Index in range(len(HeaderComments)):
            if HeaderComments[Index].find('@BinaryHeader') != -1:
                HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
                StartPos = Index
                break
        AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
        AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)

        # Emit protocol/PPI/GUID entries with their usage comments attached
        GenList = [
            (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
            (self.PpiList, self._PpiComments, 'ppi_item'),
            (GuidList, self._GuidComments, 'guid_item')
        ]
        for Item in GenList:
            for CName in Item[0]:
                Comments = '\n '.join(Item[1][CName]) if CName in Item[1] else ''
                Entry = Comments + '\n ' + CName if Comments else CName
                AsBuiltInfDict[Item[2]].append(Entry)
        PatchList = parsePcdInfoFromMapFile(
                            os.path.join(self.OutputDir, self.Name + '.map'),
                            os.path.join(self.OutputDir, self.Name + '.efi')
                        )
        if PatchList:
            for Pcd in PatchablePcds:
                TokenCName = Pcd.TokenCName
                for PcdItem in GlobalData.MixedPcd:
                    if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                        TokenCName = PcdItem[0]
                        break
                # only PCDs that actually appear in the map file are emitted
                for PatchPcd in PatchList:
                    if TokenCName == PatchPcd[0]:
                        break
                else:
                    continue
                PcdValue = ''
                if Pcd.DatumType == 'BOOLEAN':
                    BoolValue = Pcd.DefaultValue.upper()
                    if BoolValue == 'TRUE':
                        Pcd.DefaultValue = '1'
                    elif BoolValue == 'FALSE':
                        Pcd.DefaultValue = '0'

                if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
                    HexFormat = '0x%02x'
                    if Pcd.DatumType == TAB_UINT16:
                        HexFormat = '0x%04x'
                    elif Pcd.DatumType == TAB_UINT32:
                        HexFormat = '0x%08x'
                    elif Pcd.DatumType == TAB_UINT64:
                        HexFormat = '0x%016x'
                    PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
                else:
                    # VOID* PCD: expand the default value into a padded byte array
                    if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
                        EdkLogger.error("build", AUTOGEN_ERROR,
                                        "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
                                        )
                    ArraySize = int(Pcd.MaxDatumSize, 0)
                    PcdValue = Pcd.DefaultValue
                    if PcdValue[0] != '{':
                        Unicode = False
                        if PcdValue[0] == 'L':
                            Unicode = True
                        PcdValue = PcdValue.lstrip('L')
                        PcdValue = eval(PcdValue)
                        NewValue = '{'
                        for Index in range(0, len(PcdValue)):
                            if Unicode:
                                CharVal = ord(PcdValue[Index])
                                NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
                                        + '0x%02x' % (CharVal >> 8) + ', '
                            else:
                                NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
                        Padding = '0x00, '
                        if Unicode:
                            # UCS-2: two padding bytes per character
                            Padding = Padding * 2
                            ArraySize = ArraySize // 2
                        if ArraySize < (len(PcdValue) + 1):
                            if Pcd.MaxSizeUserSet:
                                EdkLogger.error("build", AUTOGEN_ERROR,
                                                "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                                )
                            else:
                                ArraySize = len(PcdValue) + 1
                        if ArraySize > len(PcdValue) + 1:
                            NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
                        PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
                    elif len(PcdValue.split(',')) <= ArraySize:
                        PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
                        PcdValue += '}'
                    else:
                        if Pcd.MaxSizeUserSet:
                            EdkLogger.error("build", AUTOGEN_ERROR,
                                            "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                            )
                        else:
                            ArraySize = len(PcdValue) + 1
                PcdItem = '%s.%s|%s|0x%X' % \
                    (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
                PcdComments = ''
                if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                    PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
                if PcdComments:
                    PcdItem = PcdComments + '\n ' + PcdItem
                AsBuiltInfDict['patchablepcd_item'].append(PcdItem)

        # DynamicEx and VFR-referenced HII PCDs, annotated with HII variable info
        for Pcd in Pcds + VfrPcds:
            PcdCommentList = []
            HiiInfo = ''
            TokenCName = Pcd.TokenCName
            for PcdItem in GlobalData.MixedPcd:
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                    TokenCName = PcdItem[0]
                    break
            if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
                for SkuName in Pcd.SkuInfoList:
                    SkuInfo = Pcd.SkuInfoList[SkuName]
                    HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
                    break
            if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
            if HiiInfo:
                # merge the HII info into an existing usage comment if one exists
                UsageIndex = -1
                UsageStr = ''
                for Index, Comment in enumerate(PcdCommentList):
                    for Usage in UsageList:
                        if Comment.find(Usage) != -1:
                            UsageStr = Usage
                            UsageIndex = Index
                            break
                if UsageIndex != -1:
                    PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
                else:
                    PcdCommentList.append('## UNDEFINED ' + HiiInfo)
            PcdComments = '\n '.join(PcdCommentList)
            PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
            if PcdComments:
                PcdEntry = PcdComments + '\n ' + PcdEntry
            AsBuiltInfDict['pcd_item'].append(PcdEntry)
        for Item in self.BuildOption:
            if 'FLAGS' in self.BuildOption[Item]:
                AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))

        # Generated LibraryClasses section in comments.
        for Library in self.LibraryAutoGenList:
            AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))

        # Generated UserExtensions TianoCore section.
        # All tianocore user extensions are copied.
        UserExtStr = ''
        for TianoCore in self._GetTianoCoreUserExtensionList():
            UserExtStr += '\n'.join(TianoCore)
            ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
            if os.path.isfile(ExtensionFile):
                shutil.copy2(ExtensionFile, self.OutputDir)
        AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr

        # Generated depex expression section in comments.
        DepexExpression = self._GetDepexExpresionString()
        AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''

        AsBuiltInf = TemplateString()
        AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))

        SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)

        self.IsAsBuiltInfCreated = True
1652
1653 def CacheCopyFile(self, DestDir, SourceDir, File):
1654 sub_dir = os.path.relpath(File, SourceDir)
1655 destination_file = os.path.join(DestDir, sub_dir)
1656 destination_dir = os.path.dirname(destination_file)
1657 CreateDirectory(destination_dir)
1658 try:
1659 CopyFileOnChange(File, destination_dir)
1660 except:
1661 EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))
1662 return
1663
    ## Copy this module's build outputs into the binary cache
    #
    # Requires both the PreMakefile hash and the Make hash to be present in
    # the global cache IR; otherwise the module is not cached.
    #
    # @retval False  when a required hash cannot be generated (None otherwise)
    #
    def CopyModuleToCache(self):
        self.GenPreMakefileHash(GlobalData.gCacheIR)
        if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
            return False

        self.GenMakeHash(GlobalData.gCacheIR)
        if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashChain or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
            return False

        # Cache layout: module outputs under .../<MakeHash>/, FFS outputs under
        # an Ffs folder keyed by GUID+name
        MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)
        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName, MakeHashStr)
        FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name, MakeHashStr)

        CreateDirectory (FileDir)
        self.SaveHashChainFileToCache(GlobalData.gCacheIR)
        ModuleFile = path.join(self.OutputDir, self.Name + '.inf')
        if os.path.exists(ModuleFile):
            CopyFileOnChange(ModuleFile, FileDir)
        # Fall back to the build database when OutputFile was not populated
        if not self.OutputFile:
            Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
            self.OutputFile = Ma.Binaries
        for File in self.OutputFile:
            if os.path.exists(File):
                # FFS outputs go to the Ffs cache folder, everything else
                # next to the module's own cache folder
                if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):
                    self.CacheCopyFile(FfsDir, self.FfsOutputDir, File)
                else:
                    self.CacheCopyFile(FileDir, self.OutputDir, File)
1696
1697 def SaveHashChainFileToCache(self, gDict):
1698 if not GlobalData.gBinCacheDest:
1699 return False
1700
1701 self.GenPreMakefileHash(gDict)
1702 if not (self.MetaFile.Path, self.Arch) in gDict or \
1703 not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
1704 EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
1705 return False
1706
1707 self.GenMakeHash(gDict)
1708 if not (self.MetaFile.Path, self.Arch) in gDict or \
1709 not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain or \
1710 not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
1711 EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
1712 return False
1713
1714 # save the hash chain list as cache file
1715 MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)
1716 CacheDestDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
1717 CacheHashDestDir = path.join(CacheDestDir, MakeHashStr)
1718 ModuleHashPair = path.join(CacheDestDir, self.Name + ".ModuleHashPair")
1719 MakeHashChain = path.join(CacheHashDestDir, self.Name + ".MakeHashChain")
1720 ModuleFilesChain = path.join(CacheHashDestDir, self.Name + ".ModuleFilesChain")
1721
1722 # save the HashChainDict as json file
1723 CreateDirectory (CacheDestDir)
1724 CreateDirectory (CacheHashDestDir)
1725 try:
1726 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
1727 if os.path.exists(ModuleHashPair):
1728 with open(ModuleHashPair, 'r') as f:
1729 ModuleHashPairList = json.load(f)
1730 PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
1731 MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
1732 ModuleHashPairList.append((PreMakeHash, MakeHash))
1733 ModuleHashPairList = list(set(map(tuple, ModuleHashPairList)))
1734 with open(ModuleHashPair, 'w') as f:
1735 json.dump(ModuleHashPairList, f, indent=2)
1736 except:
1737 EdkLogger.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair)
1738 return False
1739
1740 try:
1741 with open(MakeHashChain, 'w') as f:
1742 json.dump(gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain, f, indent=2)
1743 except:
1744 EdkLogger.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain)
1745 return False
1746
1747 try:
1748 with open(ModuleFilesChain, 'w') as f:
1749 json.dump(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain, f, indent=2)
1750 except:
1751 EdkLogger.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain)
1752 return False
1753
1754 # save the autogenfile and makefile for debug usage
1755 CacheDebugDir = path.join(CacheHashDestDir, "CacheDebug")
1756 CreateDirectory (CacheDebugDir)
1757 CopyFileOnChange(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath, CacheDebugDir)
1758 if gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:
1759 for File in gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:
1760 CopyFileOnChange(str(File), CacheDebugDir)
1761
1762 return True
1763
    ## Create makefile for the module and its dependent libraries
    #
    #   @param      CreateLibraryMakeFile   Flag indicating whether or not the makefiles of
    #                                       dependent libraries will be created
    #
    @cached_class_function
    def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):
        """Create the makefile for this module and, optionally, its libraries.

        @param CreateLibraryMakeFile  also create makefiles for dependent
                                      libraries when this module is not a library
        @param GenFfsList             FFS command list stored on self.GenFfsList
                                      NOTE(review): mutable default argument; safe
                                      only while callers never mutate the list
        """
        gDict = GlobalData.gCacheIR
        # Skip when the shared cache IR already recorded a finished makefile.
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
            return

        # nest this function inside its only caller.
        def CreateTimeStamp():
            # Record every file whose change must trigger regeneration:
            # module meta file, sources, library meta files, AutoGen deps.
            FileSet = {self.MetaFile.Path}

            for SourceFile in self.Module.Sources:
                FileSet.add (SourceFile.Path)

            for Lib in self.DependentLibraryList:
                FileSet.add (Lib.MetaFile.Path)

            for f in self.AutoGenDepSet:
                FileSet.add (f.Path)

            # Re-create the time stamp file from scratch.
            if os.path.exists (self.TimeStampPath):
                os.remove (self.TimeStampPath)

            SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)

        # Ignore generating makefile when it is a binary module
        if self.IsBinaryModule:
            return

        self.GenFfsList = GenFfsList

        if not self.IsLibrary and CreateLibraryMakeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateMakeFile()

        # CanSkip uses timestamps to determine build skipping
        if self.CanSkip():
            return

        # Custom makefiles (if declared in the INF) take priority.
        if len(self.CustomMakefile) == 0:
            Makefile = GenMake.ModuleMakefile(self)
        else:
            Makefile = GenMake.CustomMakefile(self)
        if Makefile.Generate():
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
                            (self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
                            (self.Name, self.Arch))

        CreateTimeStamp()

        MakefileType = Makefile._FileType
        MakefileName = Makefile._FILE_NAME_[MakefileType]
        MakefilePath = os.path.join(self.MakeFileDir, MakefileName)

        # Publish the makefile path and header dependencies into the shared
        # cache IR; fall back to a fresh IR entry when none exists yet.
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.MakefilePath = MakefilePath
        MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet
        MewIR.CreateMakeFileDone = True
        with GlobalData.cache_lock:
            try:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.MakefilePath = MakefilePath
                IR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet
                IR.CreateMakeFileDone = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR
1839 def CopyBinaryFiles(self):
1840 for File in self.Module.Binaries:
1841 SrcPath = File.Path
1842 DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))
1843 CopyLongFilePath(SrcPath, DstPath)
    ## Create autogen code for the module and its dependent libraries
    #
    #   @param      CreateLibraryCodeFile   Flag indicating whether or not the code of
    #                                       dependent libraries will be created
    #
    def CreateCodeFile(self, CreateLibraryCodeFile=True):
        """Create AutoGen code files (and depex files) for this module.

        @param CreateLibraryCodeFile  also create code files for dependent
                                      libraries when this module is not a library
        @retval list  names of generated files (only on a full generation pass;
                      early-exit paths return None)
        """
        gDict = GlobalData.gCacheIR
        # Skip when the shared cache IR already recorded finished code files.
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:
            return

        if self.IsCodeFileCreated:
            return

        # Need to generate PcdDatabase even PcdDriver is binarymodule
        if self.IsBinaryModule and self.PcdIsDriver != '':
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
            return
        if self.IsBinaryModule:
            # Binary libraries only need their binaries copied to the output dir.
            if self.IsLibrary:
                self.CopyBinaryFiles()
            return

        if not self.IsLibrary and CreateLibraryCodeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateCodeFile()

        # CanSkip uses timestamps to determine build skipping
        if self.CanSkip():
            return
        # Touch LibraryAutoGenList so its cached_property side effect
        # (_ApplyBuildRule on library CODA targets) runs before generation.
        self.LibraryAutoGenList
        AutoGenList = []
        IgoredAutoGenList = []

        # Generate each AutoGen file; track which were (re)written vs skipped.
        for File in self.AutoGenFileList:
            if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
                AutoGenList.append(str(File))
            else:
                IgoredAutoGenList.append(str(File))


        for ModuleType in self.DepexList:
            # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
            if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
                continue

            Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
            DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}

            # A non-empty postfix notation means a real depex was produced.
            if len(Dpx.PostfixNotation) != 0:
                self.DepexGenerated = True

            if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
                AutoGenList.append(str(DpxFile))
            else:
                IgoredAutoGenList.append(str(DpxFile))

        if IgoredAutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
                            (" ".join(AutoGenList), self.Name, self.Arch))
        elif AutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
                            (" ".join(IgoredAutoGenList), self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
                            (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))

        self.IsCodeFileCreated = True
        # Publish completion into the shared cache IR; fall back to a fresh
        # IR entry when none exists yet.
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.CreateCodeFileDone = True
        with GlobalData.cache_lock:
            try:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.CreateCodeFileDone = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR

        return AutoGenList
1923
    ## Summarize the ModuleAutoGen objects of all libraries used by this module
    @cached_property
    def LibraryAutoGenList(self):
        """List of ModuleAutoGen objects for every dependent library.

        Side effect: applies the build rule to each new library's CODA targets
        so their outputs take part in this module's build.
        """
        RetVal = []
        for Library in self.DependentLibraryList:
            # Build a library-flavored ModuleAutoGen in this module's context.
            La = ModuleAutoGen(
                    self.Workspace,
                    Library.MetaFile,
                    self.BuildTarget,
                    self.ToolChain,
                    self.Arch,
                    self.PlatformInfo.MetaFile,
                    self.DataPipe
                    )
            La.IsLibrary = True
            if La not in RetVal:
                RetVal.append(La)
                for Lib in La.CodaTargetList:
                    self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
        return RetVal
1944
1945 def GenModuleHash(self):
1946 # Initialize a dictionary for each arch type
1947 if self.Arch not in GlobalData.gModuleHash:
1948 GlobalData.gModuleHash[self.Arch] = {}
1949
1950 # Early exit if module or library has been hashed and is in memory
1951 if self.Name in GlobalData.gModuleHash[self.Arch]:
1952 return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
1953
1954 # Initialze hash object
1955 m = hashlib.md5()
1956
1957 # Add Platform level hash
1958 m.update(GlobalData.gPlatformHash.encode('utf-8'))
1959
1960 # Add Package level hash
1961 if self.DependentPackageList:
1962 for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
1963 if Pkg.PackageName in GlobalData.gPackageHash:
1964 m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))
1965
1966 # Add Library hash
1967 if self.LibraryAutoGenList:
1968 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
1969 if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
1970 Lib.GenModuleHash()
1971 m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
1972
1973 # Add Module self
1974 with open(str(self.MetaFile), 'rb') as f:
1975 Content = f.read()
1976 m.update(Content)
1977
1978 # Add Module's source files
1979 if self.SourceFileList:
1980 for File in sorted(self.SourceFileList, key=lambda x: str(x)):
1981 f = open(str(File), 'rb')
1982 Content = f.read()
1983 f.close()
1984 m.update(Content)
1985
1986 GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()
1987
1988 return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
1989
    def GenModuleFilesHash(self, gDict):
        """Hash this module's meta file, sources and all included headers.

        Stores ModuleFilesHashDigest, ModuleFilesHashHexDigest and
        ModuleFilesChain (list of (file, md5-hex) tuples) in the cache IR.

        @param gDict  shared cache dict keyed by (MetaFile.Path, Arch)
        @retval the module's cache IR entry, or None when the cache crashed
        """
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict:
            if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:
                return gDict[(self.MetaFile.Path, self.Arch)]

        # skip if the module cache already crashed
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return

        DependencyFileSet = set()
        # Add Module Meta file
        DependencyFileSet.add(self.MetaFile)

        # Add Module's source files
        if self.SourceFileList:
            for File in set(self.SourceFileList):
                DependencyFileSet.add(File)

        # Add module's include header files
        # Search dependency file list for each source file
        SourceFileList = []
        OutPutFileList = []
        for Target in self.IntroTargetList:
            SourceFileList.extend(Target.Inputs)
            OutPutFileList.extend(Target.Outputs)
        # Intermediate outputs that also appear as inputs are not true sources.
        if OutPutFileList:
            for Item in OutPutFileList:
                if Item in SourceFileList:
                    SourceFileList.remove(Item)
        SearchList = []
        for file_path in self.IncludePathList + self.BuildOptionIncPathList:
            # skip the folders in platform BuildDir which are not been generated yet
            if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):
                continue
            SearchList.append(file_path)
        FileDependencyDict = {}
        ForceIncludedFile = []
        for F in SourceFileList:
            # skip the files which are not been generated yet, because
            # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c
            if not os.path.exists(F.Path):
                continue
            FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)

        if FileDependencyDict:
            for Dependency in FileDependencyDict.values():
                DependencyFileSet.update(set(Dependency))

        # Calculate all above dependency files hash
        # Initialize hash object; sort so the digest order is deterministic
        FileList = []
        m = hashlib.md5()
        for File in sorted(DependencyFileSet, key=lambda x: str(x)):
            if not os.path.exists(str(File)):
                EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
                continue
            with open(str(File), 'rb') as f:
                Content = f.read()
            m.update(Content)
            FileList.append((str(File), hashlib.md5(Content).hexdigest()))


        # Publish results into the shared cache IR; fall back to a fresh
        # IR entry when none exists yet.
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.ModuleFilesHashDigest = m.digest()
        MewIR.ModuleFilesHashHexDigest = m.hexdigest()
        MewIR.ModuleFilesChain = FileList
        with GlobalData.cache_lock:
            try:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.ModuleFilesHashDigest = m.digest()
                IR.ModuleFilesHashHexDigest = m.hexdigest()
                IR.ModuleFilesChain = FileList
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR

        return gDict[(self.MetaFile.Path, self.Arch)]
2069
    def GenPreMakefileHash(self, gDict):
        """Compute the pre-makefile hash for this module.

        Combines the platform hash, dependent package hashes, dependent library
        ModuleFilesHashDigests and this module's own ModuleFilesHashDigest,
        storing the hex digest as PreMakefileHashHexDigest in the cache IR.

        @param gDict  shared cache dict keyed by (MetaFile.Path, Arch)
        @retval the module's cache IR entry, or None when skipped/failed
        """
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            return gDict[(self.MetaFile.Path, self.Arch)]

        # skip if the module cache already crashed
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return

        # skip binary module
        if self.IsBinaryModule:
            return

        # Generate the module files digest on demand.
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            self.GenModuleFilesHash(gDict)

        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return

        # Initialize hash object
        m = hashlib.md5()

        # Add Platform level hash ('PlatformHash' is a plain string key;
        # the parentheses are redundant, not a tuple)
        if ('PlatformHash') in gDict:
            m.update(gDict[('PlatformHash')].encode('utf-8'))
        else:
            EdkLogger.quiet("[cache warning]: PlatformHash is missing")

        # Add Package level hash; sorted so the digest order is deterministic
        if self.DependentPackageList:
            for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
                if (Pkg.PackageName, 'PackageHash') in gDict:
                    m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))
                else:
                    EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg.PackageName, self.MetaFile.Name, self.Arch))

        # Add Library hash; generate any missing library digest first
        if self.LibraryAutoGenList:
            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
                   not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:
                    Lib.GenPreMakefileHash(gDict)
                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)

        # Add Module self
        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)

        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.PreMakefileHashHexDigest = m.hexdigest()
            gDict[(self.MetaFile.Path, self.Arch)] = IR

        return gDict[(self.MetaFile.Path, self.Arch)]
2128
2129 def GenMakeHeaderFilesHash(self, gDict):
2130 # Early exit if module or library has been hashed and is in memory
2131 if (self.MetaFile.Path, self.Arch) in gDict and \
2132 gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
2133 return gDict[(self.MetaFile.Path, self.Arch)]
2134
2135 # skip if the module cache already crashed
2136 if (self.MetaFile.Path, self.Arch) in gDict and \
2137 gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
2138 return
2139
2140 # skip binary module
2141 if self.IsBinaryModule:
2142 return
2143
2144 if not (self.MetaFile.Path, self.Arch) in gDict or \
2145 not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:
2146 if self.IsLibrary:
2147 if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.libConstPcd:
2148 self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]
2149 if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.Refes:
2150 self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]
2151 self.CreateCodeFile()
2152 if not (self.MetaFile.Path, self.Arch) in gDict or \
2153 not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
2154 self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.Path, self.Arch),[]))
2155
2156 if not (self.MetaFile.Path, self.Arch) in gDict or \
2157 not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \
2158 not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
2159 EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2160 return
2161
2162 DependencyFileSet = set()
2163 # Add Makefile
2164 if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:
2165 DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)
2166 else:
2167 EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2168
2169 # Add header files
2170 if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:
2171 for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:
2172 DependencyFileSet.add(File)
2173 else:
2174 EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2175
2176 # Add AutoGen files
2177 if self.AutoGenFileList:
2178 for File in set(self.AutoGenFileList):
2179 DependencyFileSet.add(File)
2180
2181 # Caculate all above dependency files hash
2182 # Initialze hash object
2183 FileList = []
2184 m = hashlib.md5()
2185 for File in sorted(DependencyFileSet, key=lambda x: str(x)):
2186 if not os.path.exists(str(File)):
2187 EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
2188 continue
2189 f = open(str(File), 'rb')
2190 Content = f.read()
2191 f.close()
2192 m.update(Content)
2193 FileList.append((str(File), hashlib.md5(Content).hexdigest()))
2194
2195 with GlobalData.cache_lock:
2196 IR = gDict[(self.MetaFile.Path, self.Arch)]
2197 IR.AutoGenFileList = self.AutoGenFileList.keys()
2198 IR.MakeHeaderFilesHashChain = FileList
2199 IR.MakeHeaderFilesHashDigest = m.digest()
2200 gDict[(self.MetaFile.Path, self.Arch)] = IR
2201
2202 return gDict[(self.MetaFile.Path, self.Arch)]
2203
    def GenMakeHash(self, gDict):
        """Compute the final make hash and ordered hash chain for this module.

        Combines MakeHeaderFilesHashDigest, every dependent library's
        MakeHashDigest and this module's ModuleFilesHashDigest; stores
        MakeHashDigest, MakeHashHexDigest and MakeHashChain in the cache IR.

        @param gDict  shared cache dict keyed by (MetaFile.Path, Arch)
        @retval the module's cache IR entry, or None when skipped/failed
        """
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
            return gDict[(self.MetaFile.Path, self.Arch)]

        # skip if the module cache already crashed
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return

        # skip binary module
        if self.IsBinaryModule:
            return

        # Generate the prerequisite digests on demand.
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            self.GenModuleFilesHash(gDict)
        if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
            self.GenMakeHeaderFilesHash(gDict)

        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:
            EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return

        # Initialize hash object
        m = hashlib.md5()
        MakeHashChain = []

        # Add hash of makefile and dependency header files. New chain entries
        # are de-duplicated against the chain built so far and sorted before
        # being appended, keeping the chain deterministic.
        m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)
        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))
        New.sort(key=lambda x: str(x))
        MakeHashChain += New

        # Add Library hash; compute any missing library make hash first
        if self.LibraryAutoGenList:
            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
                   not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:
                    Lib.GenMakeHash(gDict)
                if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:
                    print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)
                    continue
                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)
                New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))
                New.sort(key=lambda x: str(x))
                MakeHashChain += New

        # Add Module self
        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)
        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))
        New.sort(key=lambda x: str(x))
        MakeHashChain += New

        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.MakeHashDigest = m.digest()
            IR.MakeHashHexDigest = m.hexdigest()
            IR.MakeHashChain = MakeHashChain
            gDict[(self.MetaFile.Path, self.Arch)] = IR

        return gDict[(self.MetaFile.Path, self.Arch)]
2271
2272 ## Decide whether we can skip the left autogen and make process
    def CanSkipbyPreMakefileCache(self, gDict):
        """Decide whether autogen and make can both be skipped via the cache.

        Compares this module's PreMakefileHash against the cached
        ModuleHashPair list and, on a hit, restores the output and FFS files
        from the matching cache directory.

        @param gDict  shared cache dict keyed by (MetaFile.Path, Arch)
        @retval True  cache hit; files restored
        @retval False cache miss or caching unavailable
        """
        if not GlobalData.gBinCacheSource:
            return False

        if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:
            return True

        if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return False

        # If Module is binary, do not skip by cache
        if self.IsBinaryModule:
            return False

        # .inc contains binary information so do not skip by hash as well
        for f_ext in self.SourceFileList:
            if '.inc' in str(f_ext):
                return False

        # Get the module hash values from stored cache and current build
        # then check whether cache hit based on the hash values
        # if cache hit, restore all the files from cache
        FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
        FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)

        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
        ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
        if not os.path.exists(ModuleHashPair):
            EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
            # Mark the entry as crashed so later cache checks fail fast.
            with GlobalData.cache_lock:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.CacheCrash = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            return False

        try:
            with open(ModuleHashPair, 'r') as f:
                ModuleHashPairList = json.load(f)
        except:
            EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
            return False

        self.GenPreMakefileHash(gDict)
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return False

        # Find a stored pair whose PreMakefileHash matches the current build.
        MakeHashStr = None
        CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
        for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
            if PreMakefileHash == CurrentPreMakeHash:
                MakeHashStr = str(MakeHash)

        if not MakeHashStr:
            return False

        TargetHashDir = path.join(FileDir, MakeHashStr)
        TargetFfsHashDir = path.join(FfsDir, MakeHashStr)

        if not os.path.exists(TargetHashDir):
            EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
            return False

        # Restore every cached output and FFS file from the cache dirs.
        for root, dir, files in os.walk(TargetHashDir):
            for f in files:
                File = path.join(root, f)
                self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
        if os.path.exists(TargetFfsHashDir):
            for root, dir, files in os.walk(TargetFfsHashDir):
                for f in files:
                    File = path.join(root, f)
                    self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)

        # PCD driver modules must regenerate the PCD database code.
        if self.Name == "PcdPeim" or self.Name == "PcdDxe":
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())

        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.PreMakeCacheHit = True
            gDict[(self.MetaFile.Path, self.Arch)] = IR
        print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)
        #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))
        return True
2357
2358 ## Decide whether we can skip the make process
    def CanSkipbyMakeCache(self, gDict):
        """Decide whether the make step can be skipped via the binary cache.

        Compares this module's MakeHash against the cached ModuleHashPair list
        and, on a hit, restores output and FFS files from the cache dir.

        @param gDict  shared cache dict keyed by (MetaFile.Path, Arch)
        @retval True  cache hit; files restored, make can be skipped
        @retval False cache miss or caching unavailable
        """
        if not GlobalData.gBinCacheSource:
            return False

        if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
            return True

        if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return False

        # If Module is binary, do not skip by cache
        if self.IsBinaryModule:
            print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)
            return False

        # .inc contains binary information so do not skip by hash as well
        for f_ext in self.SourceFileList:
            if '.inc' in str(f_ext):
                # Record the definitive miss so the check is not repeated.
                with GlobalData.cache_lock:
                    IR = gDict[(self.MetaFile.Path, self.Arch)]
                    IR.MakeCacheHit = False
                    gDict[(self.MetaFile.Path, self.Arch)] = IR
                print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)
                return False

        # Get the module hash values from stored cache and current build
        # then check whether cache hit based on the hash values
        # if cache hit, restore all the files from cache
        FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
        FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)

        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
        ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
        if not os.path.exists(ModuleHashPair):
            EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
            # Mark the entry as crashed so later cache checks fail fast.
            with GlobalData.cache_lock:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.CacheCrash = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            return False

        try:
            with open(ModuleHashPair, 'r') as f:
                ModuleHashPairList = json.load(f)
        except:
            EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
            return False

        self.GenMakeHash(gDict)
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
            EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return False

        # Find a stored pair whose MakeHash matches the current build.
        MakeHashStr = None
        CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
        for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
            if MakeHash == CurrentMakeHash:
                MakeHashStr = str(MakeHash)

        if not MakeHashStr:
            print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
            return False

        TargetHashDir = path.join(FileDir, MakeHashStr)
        TargetFfsHashDir = path.join(FfsDir, MakeHashStr)
        if not os.path.exists(TargetHashDir):
            EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
            return False

        # Restore every cached output and FFS file from the cache dirs.
        for root, dir, files in os.walk(TargetHashDir):
            for f in files:
                File = path.join(root, f)
                self.CacheCopyFile(self.OutputDir, TargetHashDir, File)

        if os.path.exists(TargetFfsHashDir):
            for root, dir, files in os.walk(TargetFfsHashDir):
                for f in files:
                    File = path.join(root, f)
                    self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)

        # PCD driver modules must regenerate the PCD database code.
        if self.Name == "PcdPeim" or self.Name == "PcdDxe":
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.MakeCacheHit = True
            gDict[(self.MetaFile.Path, self.Arch)] = IR
        print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
        return True
2448
2449 ## Show the first file name which causes cache miss
2450 def PrintFirstMakeCacheMissFile(self, gDict):
2451 if not GlobalData.gBinCacheSource:
2452 return
2453
2454 # skip if the module cache already crashed
2455 if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
2456 return
2457
2458 # skip binary module
2459 if self.IsBinaryModule:
2460 return
2461
2462 if not (self.MetaFile.Path, self.Arch) in gDict:
2463 return
2464
2465 # Only print cache miss file for the MakeCache not hit module
2466 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
2467 return
2468
2469 if not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
2470 EdkLogger.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self.MetaFile.Path, self.Arch))
2471 return
2472
2473 # Find the cache dir name through the .ModuleHashPair file info
2474 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2475
2476 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2477 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
2478 if not os.path.exists(ModuleHashPair):
2479 EdkLogger.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2480 return
2481
2482 try:
2483 with open(ModuleHashPair, 'r') as f:
2484 ModuleHashPairList = json.load(f)
2485 except:
2486 EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2487 return
2488
2489 MakeHashSet = set()
2490 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2491 TargetHashDir = path.join(FileDir, str(MakeHash))
2492 if os.path.exists(TargetHashDir):
2493 MakeHashSet.add(MakeHash)
2494 if not MakeHashSet:
2495 EdkLogger.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2496 return
2497
2498 TargetHash = list(MakeHashSet)[0]
2499 TargetHashDir = path.join(FileDir, str(TargetHash))
2500 if len(MakeHashSet) > 1 :
2501 EdkLogger.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash, self.MetaFile.Path, self.Arch))
2502
2503 ListFile = path.join(TargetHashDir, self.Name + '.MakeHashChain')
2504 if os.path.exists(ListFile):
2505 try:
2506 f = open(ListFile, 'r')
2507 CachedList = json.load(f)
2508 f.close()
2509 except:
2510 EdkLogger.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile)
2511 return
2512 else:
2513 EdkLogger.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile)
2514 return
2515
2516 CurrentList = gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain
2517 for idx, (file, hash) in enumerate (CurrentList):
2518 (filecached, hashcached) = CachedList[idx]
2519 if file != filecached:
2520 EdkLogger.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self.MetaFile.Path, self.Arch, file, filecached))
2521 break
2522 if hash != hashcached:
2523 EdkLogger.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self.MetaFile.Path, self.Arch, file))
2524 break
2525
2526 return True
2527
2528 ## Decide whether we can skip the ModuleAutoGen process
2529 def CanSkipbyCache(self, gDict):
2530 # Hashing feature is off
2531 if not GlobalData.gBinCacheSource:
2532 return False
2533
2534 if self in GlobalData.gBuildHashSkipTracking:
2535 return GlobalData.gBuildHashSkipTracking[self]
2536
2537 # If library or Module is binary do not skip by hash
2538 if self.IsBinaryModule:
2539 GlobalData.gBuildHashSkipTracking[self] = False
2540 return False
2541
2542 # .inc is contains binary information so do not skip by hash as well
2543 for f_ext in self.SourceFileList:
2544 if '.inc' in str(f_ext):
2545 GlobalData.gBuildHashSkipTracking[self] = False
2546 return False
2547
2548 if not (self.MetaFile.Path, self.Arch) in gDict:
2549 return False
2550
2551 if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:
2552 GlobalData.gBuildHashSkipTracking[self] = True
2553 return True
2554
2555 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
2556 GlobalData.gBuildHashSkipTracking[self] = True
2557 return True
2558
2559 return False
2560
2561 ## Decide whether we can skip the ModuleAutoGen process
2562 # If any source file is newer than the module than we cannot skip
2563 #
    def CanSkip(self):
        """Decide whether module AutoGen can be skipped based on time stamps.

        Skips only when no cache feature is active and no recorded dependency
        file is newer than the module's AutoGenTimeStamp file.

        @retval True   AutoGen can be skipped
        @retval False  something changed (or no time stamp exists); do not skip
        """
        # Don't skip if cache feature enabled
        if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
            return False
        if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
            return True
        if not os.path.exists(self.TimeStampPath):
            return False
        #last creation time of the module (stat index 8 is st_mtime)
        DstTimeStamp = os.stat(self.TimeStampPath)[8]

        SrcTimeStamp = self.Workspace._SrcTimeStamp
        if SrcTimeStamp > DstTimeStamp:
            return False

        # Compare every recorded dependency file's mtime against the stamp;
        # a missing file forces a re-run. TimeDict memoizes os.stat results
        # at class level, shared across modules.
        with open(self.TimeStampPath,'r') as f:
            for source in f:
                source = source.rstrip('\n')
                if not os.path.exists(source):
                    return False
                if source not in ModuleAutoGen.TimeDict :
                    ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
                if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
                    return False
        GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
        return True
2590
2591 @cached_property
2592 def TimeStampPath(self):
2593 return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')