]> git.proxmox.com Git - mirror_edk2.git/blob - BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
BaseTools: Print first cache missing file for build cache
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / ModuleAutoGen.py
1 ## @file
2 # Create makefile for MS nmake and GNU make
3 #
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
6 #
7 from __future__ import absolute_import
8 from AutoGen.AutoGen import AutoGen
9 from Common.LongFilePathSupport import CopyLongFilePath
10 from Common.BuildToolError import *
11 from Common.DataType import *
12 from Common.Misc import *
13 from Common.StringUtils import NormPath,GetSplitList
14 from collections import defaultdict
15 from Workspace.WorkspaceCommon import OrderedListDict
16 import os.path as path
17 import copy
18 import hashlib
19 from . import InfSectionParser
20 from . import GenC
21 from . import GenMake
22 from . import GenDepex
23 from io import BytesIO
24 from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile
25 from Workspace.MetaFileCommentParser import UsageList
26 from .GenPcdDb import CreatePcdDatabaseCode
27 from Common.caching import cached_class_function
28 from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo
29 from AutoGen.CacheIR import ModuleBuildCacheIR
30 import json
31
## Mapping Makefile type
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
#
# Regular expressions for finding Include Directories in tool FLAGS; the
# difference between MSFT and INTEL/GCC/RVCT is that the former uses /I and
# the latter use -I to specify include directories.
#
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

## default file names for AutoGen artifacts; "%(module_name)s" is substituted
## per module
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
# INF_VERSION written into generated As-Built INF files
gInfSpecVersion = "0x00010017"
50
#
# Match "name = <identifier>" inside an efivarstore statement.
#
# NOTE: raw strings are used so \s and \w are regex escapes, not string
# escape sequences (non-raw "\s"/"\w" is deprecated and will become a
# SyntaxError in future Python versions).
#
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
#
# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
#
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
60
#
# Template string used to generate the As-Built INF for a module.
# ${BEGIN}/${END} pairs delimit sections that TemplateString repeats once per
# supplied value; absent values drop the line entirely.
#
gAsBuiltInfHeaderString = TemplateString("""${header_comments}

# DO NOT EDIT
# FILE auto-generated

[Defines]
  INF_VERSION                    = ${module_inf_version}
  BASE_NAME                      = ${module_name}
  FILE_GUID                      = ${module_guid}
  MODULE_TYPE                    = ${module_module_type}${BEGIN}
  VERSION_STRING                 = ${module_version_string}${END}${BEGIN}
  PCD_IS_DRIVER                  = ${pcd_is_driver_string}${END}${BEGIN}
  UEFI_SPECIFICATION_VERSION     = ${module_uefi_specification_version}${END}${BEGIN}
  PI_SPECIFICATION_VERSION       = ${module_pi_specification_version}${END}${BEGIN}
  ENTRY_POINT                    = ${module_entry_point}${END}${BEGIN}
  UNLOAD_IMAGE                   = ${module_unload_image}${END}${BEGIN}
  CONSTRUCTOR                    = ${module_constructor}${END}${BEGIN}
  DESTRUCTOR                     = ${module_destructor}${END}${BEGIN}
  SHADOW                         = ${module_shadow}${END}${BEGIN}
  PCI_VENDOR_ID                  = ${module_pci_vendor_id}${END}${BEGIN}
  PCI_DEVICE_ID                  = ${module_pci_device_id}${END}${BEGIN}
  PCI_CLASS_CODE                 = ${module_pci_class_code}${END}${BEGIN}
  PCI_REVISION                   = ${module_pci_revision}${END}${BEGIN}
  BUILD_NUMBER                   = ${module_build_number}${END}${BEGIN}
  SPEC                           = ${module_spec}${END}${BEGIN}
  UEFI_HII_RESOURCE_SECTION      = ${module_uefi_hii_resource_section}${END}${BEGIN}
  MODULE_UNI_FILE                = ${module_uni_file}${END}

[Packages.${module_arch}]${BEGIN}
  ${package_item}${END}

[Binaries.${module_arch}]${BEGIN}
  ${binary_item}${END}

[PatchPcd.${module_arch}]${BEGIN}
  ${patchablepcd_item}
${END}

[Protocols.${module_arch}]${BEGIN}
  ${protocol_item}
${END}

[Ppis.${module_arch}]${BEGIN}
  ${ppi_item}
${END}

[Guids.${module_arch}]${BEGIN}
  ${guid_item}
${END}

[PcdEx.${module_arch}]${BEGIN}
  ${pcd_item}
${END}

[LibraryClasses.${module_arch}]
## @LIB_INSTANCES${BEGIN}
#  ${libraryclasses_item}${END}

${depexsection_item}

${userextension_tianocore_item}

${tail_comments}

[BuildOptions.${module_arch}]
## @AsBuilt${BEGIN}
##   ${flags_item}${END}
""")
#
# Extend the lists held in CopyToDict with the lists stored under the same
# keys in CopyFromDict.  CopyToDict is expected to behave like
# defaultdict(list): a key that is new to CopyToDict must auto-create an
# empty list, otherwise this raises KeyError.
#
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    for Key, ValueList in CopyFromDict.items():
        CopyToDict[Key].extend(ValueList)
139
## Join the given path elements, create the resulting directory (if missing)
#  and return its full path.
def _MakeDir(PathList):
    FullPath = path.join(*PathList)
    CreateDirectory(FullPath)
    return FullPath
145
146 #
147 # Convert string to C format array
148 #
149 def _ConvertStringToByteArray(Value):
150 Value = Value.strip()
151 if not Value:
152 return None
153 if Value[0] == '{':
154 if not Value.endswith('}'):
155 return None
156 Value = Value.replace(' ', '').replace('{', '').replace('}', '')
157 ValFields = Value.split(',')
158 try:
159 for Index in range(len(ValFields)):
160 ValFields[Index] = str(int(ValFields[Index], 0))
161 except ValueError:
162 return None
163 Value = '{' + ','.join(ValFields) + '}'
164 return Value
165
166 Unicode = False
167 if Value.startswith('L"'):
168 if not Value.endswith('"'):
169 return None
170 Value = Value[1:]
171 Unicode = True
172 elif not Value.startswith('"') or not Value.endswith('"'):
173 return None
174
175 Value = eval(Value) # translate escape character
176 NewValue = '{'
177 for Index in range(0, len(Value)):
178 if Unicode:
179 NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
180 else:
181 NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
182 Value = NewValue + '0}'
183 return Value
184
185 ## ModuleAutoGen class
186 #
187 # This class encapsules the AutoGen behaviors for the build tools. In addition to
188 # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
189 # to the [depex] section in module's inf file.
190 #
191 class ModuleAutoGen(AutoGen):
192 # call super().__init__ then call the worker function with different parameter count
193 def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
194 if not hasattr(self, "_Init"):
195 self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
196 self._Init = True
197
    ## Cache the timestamps of metafiles of every module in a class attribute
    #
    # Class-level (shared by all ModuleAutoGen instances).
    TimeDict = {}
201
    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # check if this module is employed by active platform
        # args matches _InitWorker's tail: args[0] is the platform meta-file,
        # args[-1] is the DataPipe.
        if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):
            EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
                              % (MetaFile, Arch))
            # Returning None (not an instance of cls) also prevents __init__
            # from running; callers must handle a None ModuleAutoGen.
            return None
        return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
209
210 ## Initialize ModuleAutoGen
211 #
212 # @param Workspace EdkIIWorkspaceBuild object
213 # @param ModuleFile The path of module file
214 # @param Target Build target (DEBUG, RELEASE)
215 # @param Toolchain Name of tool chain
216 # @param Arch The arch the module supports
217 # @param PlatformFile Platform meta-file
218 #
    def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile,DataPipe):
        """One-time initialization of all per-module state (see __init__)."""
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)

        self.Workspace = Workspace
        self.WorkspaceDir = ""
        self.PlatformInfo = None
        self.DataPipe = DataPipe
        # Fills in self.WorkspaceDir and self.PlatformInfo from the DataPipe;
        # must run before SourceDir is made workspace-relative below.
        self.__init_platform_info__()
        self.MetaFile = ModuleFile
        self.SourceDir = self.MetaFile.SubDir
        self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)

        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily

        # Flags recording which generation steps have already been performed.
        self.IsCodeFileCreated = False
        self.IsAsBuiltInfCreated = False
        self.DepexGenerated = False

        self.BuildDatabase = self.Workspace.BuildDatabase
        self.BuildRuleOrder = None    # set as a side effect of BuildOption
        self.BuildTime = 0

        self._GuidComments = OrderedListDict()
        self._ProtocolComments = OrderedListDict()
        self._PpiComments = OrderedListDict()
        # Lazily populated by _ApplyBuildRule() via the Targets property.
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None

        self.AutoGenDepSet = set()
        self.ReferenceModules = []
        self.ConstPcd = {}
        self.Makefile = None
        self.FileDependCache = {}
259
260 def __init_platform_info__(self):
261 pinfo = self.DataPipe.Get("P_Info")
262 self.WorkspaceDir = pinfo.get("WorkspaceDir")
263 self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)
264 ## hash() operator of ModuleAutoGen
265 #
266 # The module file path and arch string will be used to represent
267 # hash value of this object
268 #
269 # @retval int Hash value of the module file path and arch
270 #
271 @cached_class_function
272 def __hash__(self):
273 return hash((self.MetaFile, self.Arch))
274 def __repr__(self):
275 return "%s [%s]" % (self.MetaFile, self.Arch)
276
277 # Get FixedAtBuild Pcds of this Module
278 @cached_property
279 def FixedAtBuildPcds(self):
280 RetVal = []
281 for Pcd in self.ModulePcdList:
282 if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:
283 continue
284 if Pcd not in RetVal:
285 RetVal.append(Pcd)
286 return RetVal
287
288 @cached_property
289 def FixedVoidTypePcds(self):
290 RetVal = {}
291 for Pcd in self.FixedAtBuildPcds:
292 if Pcd.DatumType == TAB_VOID:
293 if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:
294 RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue
295 return RetVal
296
297 @property
298 def UniqueBaseName(self):
299 ModuleNames = self.DataPipe.Get("M_Name")
300 if not ModuleNames:
301 return self.Name
302 return ModuleNames.get((self.Name,self.MetaFile),self.Name)
303
    # Macros could be used in build_rule.txt (also Makefile)
    @cached_property
    def Macros(self):
        """Macro name -> value mapping substituted into build rules and
        makefiles for this module."""
        return OrderedDict((
            ("WORKSPACE" ,self.WorkspaceDir),
            ("MODULE_NAME" ,self.Name),
            ("MODULE_NAME_GUID" ,self.UniqueBaseName),
            ("MODULE_GUID" ,self.Guid),
            ("MODULE_VERSION" ,self.Version),
            ("MODULE_TYPE" ,self.ModuleType),
            ("MODULE_FILE" ,str(self.MetaFile)),
            ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),
            ("MODULE_RELATIVE_DIR" ,self.SourceDir),
            ("MODULE_DIR" ,self.SourceDir),
            ("BASE_NAME" ,self.Name),
            ("ARCH" ,self.Arch),
            ("TOOLCHAIN" ,self.ToolChain),
            ("TOOLCHAIN_TAG" ,self.ToolChain),
            ("TOOL_CHAIN_TAG" ,self.ToolChain),
            ("TARGET" ,self.BuildTarget),
            ("BUILD_DIR" ,self.PlatformInfo.BuildDir),
            ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("MODULE_BUILD_DIR" ,self.BuildDir),
            ("OUTPUT_DIR" ,self.OutputDir),
            ("DEBUG_DIR" ,self.DebugDir),
            ("DEST_DIR_OUTPUT" ,self.OutputDir),
            ("DEST_DIR_DEBUG" ,self.DebugDir),
            ("PLATFORM_NAME" ,self.PlatformInfo.Name),
            ("PLATFORM_GUID" ,self.PlatformInfo.Guid),
            ("PLATFORM_VERSION" ,self.PlatformInfo.Version),
            ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),
            ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
            ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),
            ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)
            ))
340
    ## Return the module build data object
    @cached_property
    def Module(self):
        # Keyed lookup into the workspace-wide metafile build database.
        return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
345
    ## Return the module name (BASE_NAME from the INF [Defines] data)
    @cached_property
    def Name(self):
        return self.Module.BaseName

    ## Return the module DxsFile if exist
    @cached_property
    def DxsFile(self):
        return self.Module.DxsFile
355
356 ## Return the module meta-file GUID
357 @cached_property
358 def Guid(self):
359 #
360 # To build same module more than once, the module path with FILE_GUID overridden has
361 # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
362 # in DSC. The overridden GUID can be retrieved from file name
363 #
364 if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
365 #
366 # Length of GUID is 36
367 #
368 return os.path.basename(self.MetaFile.Path)[:36]
369 return self.Module.Guid
370
    ## Return the module version
    @cached_property
    def Version(self):
        return self.Module.Version

    ## Return the module type
    @cached_property
    def ModuleType(self):
        return self.Module.ModuleType

    ## Return the component type (for Edk.x style of module)
    @cached_property
    def ComponentType(self):
        return self.Module.ComponentType

    ## Return the build type
    @cached_property
    def BuildType(self):
        return self.Module.BuildType

    ## Return the PCD_IS_DRIVER setting
    @cached_property
    def PcdIsDriver(self):
        return self.Module.PcdIsDriver

    ## Return the autogen version, i.e. module meta-file version
    @cached_property
    def AutoGenVersion(self):
        return self.Module.AutoGenVersion

    ## Check if the module is library or not
    @cached_property
    def IsLibrary(self):
        # True when the module declares a LIBRARY_CLASS.
        return bool(self.Module.LibraryClass)

    ## Check if the module is binary module or not
    @cached_property
    def IsBinaryModule(self):
        return self.Module.IsBinaryModule
410
411 ## Return the directory to store intermediate files of the module
412 @cached_property
413 def BuildDir(self):
414 return _MakeDir((
415 self.PlatformInfo.BuildDir,
416 self.Arch,
417 self.SourceDir,
418 self.MetaFile.BaseName
419 ))
420
    ## Return the directory to store the intermediate object files of the module
    @cached_property
    def OutputDir(self):
        return _MakeDir((self.BuildDir, "OUTPUT"))

    ## Return the directory path to store ffs file
    @cached_property
    def FfsOutputDir(self):
        # Only meaningful when an FDF file is part of the build; empty otherwise.
        if GlobalData.gFdfParser:
            return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
        return ''

    ## Return the directory to store auto-gened source files of the module
    @cached_property
    def DebugDir(self):
        return _MakeDir((self.BuildDir, "DEBUG"))
437
438 ## Return the path of custom file
439 @cached_property
440 def CustomMakefile(self):
441 RetVal = {}
442 for Type in self.Module.CustomMakefile:
443 MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'
444 File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
445 RetVal[MakeType] = File
446 return RetVal
447
    ## Return the directory of the makefile
    #
    # @retval string The directory string of module's makefile
    #
    @cached_property
    def MakeFileDir(self):
        return self.BuildDir

    ## Return build command string
    #
    # @retval string Build command string
    #
    @cached_property
    def BuildCommand(self):
        # The make command is platform-wide; delegate to PlatformInfo.
        return self.PlatformInfo.BuildCommand
463
464 ## Get object list of all packages the module and its dependent libraries belong to
465 #
466 # @retval list The list of package object
467 #
468 @cached_property
469 def DerivedPackageList(self):
470 PackageList = []
471 for M in [self.Module] + self.DependentLibraryList:
472 for Package in M.Packages:
473 if Package in PackageList:
474 continue
475 PackageList.append(Package)
476 return PackageList
477
    ## Get the depex string
    #
    # (Note: "Expresion" is a historical typo kept for caller compatibility.)
    #
    # @return : a string contain all depex expression.
    def _GetDepexExpresionString(self):
        DepexStr = ''
        DepexList = []
        ## DPX_SOURCE IN Define section.
        if self.Module.DxsFile:
            return DepexStr
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            DepexExpressionList = InfObj.GetDepexExpresionList()
            for DepexExpression in DepexExpressionList:
                for key in DepexExpression:
                    Arch, ModuleType = key
                    # Comment lines (starting with '#') are filtered out.
                    DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
                    # the type of build module is USER_DEFINED.
                    # All different DEPEX section tags would be copied into the As Built INF file
                    # and there would be separate DEPEX section tags
                    if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
                        if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
                    else:
                        if Arch.upper() == TAB_ARCH_COMMON or \
                          (Arch.upper() == self.Arch.upper() and \
                          ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
                            DepexList.append({(Arch, ModuleType): DepexExpr})

        #the type of build module is USER_DEFINED.
        # Emit one [Depex.<Arch>.<ModuleType>] section per collected entry,
        # with each token commented out.
        if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
            for Depex in DepexList:
                for key in Depex:
                    DepexStr += '[Depex.%s.%s]\n' % key
                    DepexStr += '\n'.join('# '+ val for val in Depex[key])
                    DepexStr += '\n\n'
            if not DepexStr:
                return '[Depex.%s]\n' % self.Arch
            return DepexStr

        #the type of build module not is USER_DEFINED.
        # AND all collected expressions together, stripping a trailing END
        # token from each parenthesized group.
        Count = 0
        for Depex in DepexList:
            Count += 1
            if DepexStr != '':
                DepexStr += ' AND '
            DepexStr += '('
            for D in Depex.values():
                DepexStr += ' '.join(val for val in D)
            Index = DepexStr.find('END')
            if Index > -1 and Index == len(DepexStr) - 3:
                DepexStr = DepexStr[:-3]
            DepexStr = DepexStr.strip()
            DepexStr += ')'
        if Count == 1:
            # A single group needs no parentheses.
            DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
        if not DepexStr:
            return '[Depex.%s]\n' % self.Arch
        return '[Depex.%s]\n# ' % self.Arch + DepexStr
537
538 ## Merge dependency expression
539 #
540 # @retval list The token list of the dependency expression after parsed
541 #
542 @cached_property
543 def DepexList(self):
544 if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
545 return {}
546
547 DepexList = []
548 #
549 # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
550 #
551 FixedVoidTypePcds = {}
552 for M in [self] + self.LibraryAutoGenList:
553 FixedVoidTypePcds.update(M.FixedVoidTypePcds)
554 for M in [self] + self.LibraryAutoGenList:
555 Inherited = False
556 for D in M.Module.Depex[self.Arch, self.ModuleType]:
557 if DepexList != []:
558 DepexList.append('AND')
559 DepexList.append('(')
560 #replace D with value if D is FixedAtBuild PCD
561 NewList = []
562 for item in D:
563 if '.' not in item:
564 NewList.append(item)
565 else:
566 try:
567 Value = FixedVoidTypePcds[item]
568 if len(Value.split(',')) != 16:
569 EdkLogger.error("build", FORMAT_INVALID,
570 "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
571 NewList.append(Value)
572 except:
573 EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))
574
575 DepexList.extend(NewList)
576 if DepexList[-1] == 'END': # no need of a END at this time
577 DepexList.pop()
578 DepexList.append(')')
579 Inherited = True
580 if Inherited:
581 EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
582 if 'BEFORE' in DepexList or 'AFTER' in DepexList:
583 break
584 if len(DepexList) > 0:
585 EdkLogger.verbose('')
586 return {self.ModuleType:DepexList}
587
    ## Merge dependency expression
    #
    # @retval dict {module type: merged dependency expression string}
    #
    @cached_property
    def DepexExpressionDict(self):
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexExpressionString = ''
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.DepexExpression[self.Arch, self.ModuleType]:
                if DepexExpressionString != '':
                    DepexExpressionString += ' AND '
                DepexExpressionString += '('
                DepexExpressionString += D
                # NOTE(review): rstrip('END') strips any trailing run of the
                # characters E/N/D, not just a literal "END" token — confirm
                # expressions can never legitimately end with those letters.
                DepexExpressionString = DepexExpressionString.rstrip('END').strip()
                DepexExpressionString += ')'
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
            if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
                break
        if len(DepexExpressionString) > 0:
            EdkLogger.verbose('')

        return {self.ModuleType:DepexExpressionString}
619
620 # Get the tiano core user extension, it is contain dependent library.
621 # @retval: a list contain tiano core userextension.
622 #
623 def _GetTianoCoreUserExtensionList(self):
624 TianoCoreUserExtentionList = []
625 for M in [self.Module] + self.DependentLibraryList:
626 Filename = M.MetaFile.Path
627 InfObj = InfSectionParser.InfSectionParser(Filename)
628 TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
629 for TianoCoreUserExtent in TianoCoreUserExtenList:
630 for Section in TianoCoreUserExtent:
631 ItemList = Section.split(TAB_SPLIT)
632 Arch = self.Arch
633 if len(ItemList) == 4:
634 Arch = ItemList[3]
635 if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
636 TianoCoreList = []
637 TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
638 TianoCoreList.extend(TianoCoreUserExtent[Section][:])
639 TianoCoreList.append('\n')
640 TianoCoreUserExtentionList.append(TianoCoreList)
641
642 return TianoCoreUserExtentionList
643
    ## Return the list of specification version required for the module
    #
    # @retval list The list of specification defined in module file
    #
    @cached_property
    def Specification(self):
        return self.Module.Specification
651
652 ## Tool option for the module build
653 #
654 # @param PlatformInfo The object of PlatformBuildInfo
655 # @retval dict The dict containing valid options
656 #
657 @cached_property
658 def BuildOption(self):
659 RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
660 if self.BuildRuleOrder:
661 self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
662 return RetVal
663
664 ## Get include path list from tool option for the module build
665 #
666 # @retval list The include path list
667 #
668 @cached_property
669 def BuildOptionIncPathList(self):
670 #
671 # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
672 # is the former use /I , the Latter used -I to specify include directories
673 #
674 if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):
675 BuildOptIncludeRegEx = gBuildOptIncludePatternMsft
676 elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
677 BuildOptIncludeRegEx = gBuildOptIncludePatternOther
678 else:
679 #
680 # New ToolChainFamily, don't known whether there is option to specify include directories
681 #
682 return []
683
684 RetVal = []
685 for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
686 try:
687 FlagOption = self.BuildOption[Tool]['FLAGS']
688 except KeyError:
689 FlagOption = ''
690
691 if self.ToolChainFamily != 'RVCT':
692 IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
693 else:
694 #
695 # RVCT may specify a list of directory seperated by commas
696 #
697 IncPathList = []
698 for Path in BuildOptIncludeRegEx.findall(FlagOption):
699 PathList = GetSplitList(Path, TAB_COMMA_SPLIT)
700 IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)
701
702 #
703 # EDK II modules must not reference header files outside of the packages they depend on or
704 # within the module's directory tree. Report error if violation.
705 #
706 if GlobalData.gDisableIncludePathCheck == False:
707 for Path in IncPathList:
708 if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
709 ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
710 EdkLogger.error("build",
711 PARAMETER_INVALID,
712 ExtraData=ErrMsg,
713 File=str(self.MetaFile))
714 RetVal += IncPathList
715 return RetVal
716
    ## Return a list of files which can be built from source
    #
    # What kind of files can be built is determined by build rules in
    # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
    #
    @cached_property
    def SourceFileList(self):
        RetVal = []
        ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
        ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
        for F in self.Module.Sources:
            # match tool chain
            if F.TagName not in ToolChainTagSet:
                EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
                continue
            # match tool chain family or build rule family
            if F.ToolChainFamily not in ToolChainFamilySet:
                EdkLogger.debug(
                            EdkLogger.DEBUG_0,
                            "The file [%s] must be built by tools of [%s], " \
                            "but current toolchain family is [%s], buildrule family is [%s]" \
                                % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
                continue

            # add the file path into search path list for file including
            # NOTE: mutates self.IncludePathList as a side effect.
            if F.Dir not in self.IncludePathList:
                self.IncludePathList.insert(0, F.Dir)
            RetVal.append(F)

        # Drop duplicates that differ only by extension (keeps the one whose
        # extension comes first in BuildRuleOrder).
        self._MatchBuildRuleOrder(RetVal)

        # Applying build rules populates _BuildTargets/_FileTypes (side effect).
        for F in RetVal:
            self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return RetVal
752
    def _MatchBuildRuleOrder(self, FileList):
        """When several sources share a base path and differ only by
        extension, keep the one whose extension appears earliest in
        BuildRuleOrder and remove the rest (modifies FileList in place)."""
        Order_Dict = {}
        # Accessed for its side effect: evaluating BuildOption fills in
        # self.BuildRuleOrder.
        self.BuildOption
        for SingleFile in FileList:
            if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
                key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]
                if key in Order_Dict:
                    Order_Dict[key].append(SingleFile.Ext)
                else:
                    Order_Dict[key] = [SingleFile.Ext]

        RemoveList = []
        for F in Order_Dict:
            if len(Order_Dict[F]) > 1:
                Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
                for Ext in Order_Dict[F][1:]:
                    RemoveList.append(F + Ext)

        # NOTE(review): RemoveList holds path strings while FileList holds
        # file objects; this relies on the file class comparing equal to its
        # path string — confirm against PathClass.__eq__.
        for item in RemoveList:
            FileList.remove(item)

        return FileList
775
    # The three lists below are populated as a side effect of applying build
    # rules (see the FileTypes property); each is empty when no such sources exist.

    ## Return the list of unicode files
    @cached_property
    def UnicodeFileList(self):
        return self.FileTypes.get(TAB_UNICODE_FILE,[])

    ## Return the list of vfr files
    @cached_property
    def VfrFileList(self):
        return self.FileTypes.get(TAB_VFR_FILE, [])

    ## Return the list of Image Definition files
    @cached_property
    def IdfFileList(self):
        return self.FileTypes.get(TAB_IMAGE_FILE,[])
790
    ## Return a list of files which can be built from binary
    #
    # "Build" binary files are just to copy them to build directory.
    #
    # @retval list The list of files which can be built later
    #
    @cached_property
    def BinaryFileList(self):
        RetVal = []
        for F in self.Module.Binaries:
            if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:
                continue
            RetVal.append(F)
            # F is appended BEFORE the rule is applied so _ApplyBuildRule can
            # see it through the BinaryFileList argument instead of recursing
            # into this cached property while it is still being computed.
            self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
        return RetVal
806
807 @cached_property
808 def BuildRules(self):
809 RetVal = {}
810 BuildRuleDatabase = self.PlatformInfo.BuildRule
811 for Type in BuildRuleDatabase.FileTypeList:
812 #first try getting build rule by BuildRuleFamily
813 RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
814 if not RuleObject:
815 # build type is always module type, but ...
816 if self.ModuleType != self.BuildType:
817 RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
818 #second try getting build rule by ToolChainFamily
819 if not RuleObject:
820 RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
821 if not RuleObject:
822 # build type is always module type, but ...
823 if self.ModuleType != self.BuildType:
824 RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
825 if not RuleObject:
826 continue
827 RuleObject = RuleObject.Instantiate(self.Macros)
828 RetVal[Type] = RuleObject
829 for Ext in RuleObject.SourceFileExtList:
830 RetVal[Ext] = RuleObject
831 return RetVal
832
    def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
        """Chain build rules starting from File (rule output feeds the next
        rule) and record intermediate/final targets in self._BuildTargets,
        self._IntroBuildTargetList, self._FinalBuildTargetList and
        self._FileTypes."""
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        if not BinaryFileList:
            BinaryFileList = self.BinaryFileList

        SubDirectory = os.path.join(self.OutputDir, File.SubDir)
        if not os.path.exists(SubDirectory):
            CreateDirectory(SubDirectory)
        LastTarget = None
        RuleChain = set()
        SourceList = [File]
        Index = 0
        #
        # Make sure to get build rule order value
        #
        self.BuildOption

        # SourceList grows while iterating: each applied rule's outputs are
        # appended and processed in turn.
        while Index < len(SourceList):
            Source = SourceList[Index]
            Index = Index + 1

            if Source != File:
                CreateDirectory(Source.Dir)

            if File.IsBinary and File == Source and File in BinaryFileList:
                # Skip all files that are not binary libraries
                if not self.IsLibrary:
                    continue
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            elif FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            else:
                # stop at no more rules
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            FileType = RuleObject.SourceFileType
            self._FileTypes[FileType].add(Source)

            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            Target = RuleObject.Apply(Source, self.BuildRuleOrder)
            if not Target:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break
            elif not Target.Outputs:
                # Only do build for target with outputs
                self._FinalBuildTargetList.add(Target)

            self._BuildTargets[FileType].add(Target)

            # The first target produced directly from the original file is an
            # "intro" target.
            if not Source.IsBinary and Source == File:
                self._IntroBuildTargetList.add(Target)

            # to avoid cyclic rule
            if FileType in RuleChain:
                break

            RuleChain.add(FileType)
            SourceList.extend(Target.Outputs)
            LastTarget = Target
            # Outputs feed the next iteration with unknown file type.
            FileType = TAB_UNKNOWN_FILE
908
909 @cached_property
910 def Targets(self):
911 if self._BuildTargets is None:
912 self._IntroBuildTargetList = set()
913 self._FinalBuildTargetList = set()
914 self._BuildTargets = defaultdict(set)
915 self._FileTypes = defaultdict(set)
916
917 #TRICK: call SourceFileList property to apply build rule for source files
918 self.SourceFileList
919
920 #TRICK: call _GetBinaryFileList to apply build rule for binary files
921 self.BinaryFileList
922
923 return self._BuildTargets
924
925 @cached_property
926 def IntroTargetList(self):
927 self.Targets
928 return self._IntroBuildTargetList
929
    @cached_property
    def CodaTargetList(self):
        """Final targets at the end of each build-rule chain."""
        # Evaluating Targets populates _FinalBuildTargetList as a side effect.
        self.Targets
        return self._FinalBuildTargetList
934
    @cached_property
    def FileTypes(self):
        """Mapping of file type to the set of files of that type."""
        # Evaluating Targets populates _FileTypes as a side effect.
        self.Targets
        return self._FileTypes
939
    ## Get the list of package object the module depends on
    #
    # @retval list The package object list
    #
    @cached_property
    def DependentPackageList(self):
        # Direct delegation to the module's parsed [Packages] section.
        return self.Module.Packages
947
    ## Return the list of auto-generated code file
    #
    # @retval list The list of auto-generated file
    #
    @cached_property
    def AutoGenFileList(self):
        """Mapping of auto-generated file (PathClass) to its content.

        Runs GenC.CreateCode to fill the AutoGen.c/AutoGen.h/string/IDF
        buffers, then registers each non-empty output with the build rules
        via _ApplyBuildRule so it participates in the build.
        """
        # Flag passed (twice) to GenC.CreateCode as both the UNI and the IDF
        # C-code generation switch; disabled for the UEFI_HII build type.
        AutoGenUniIdf = self.BuildType != 'UEFI_HII'
        UniStringBinBuffer = BytesIO()
        IdfGenBinBuffer = BytesIO()
        RetVal = {}
        AutoGenC = TemplateString()
        AutoGenH = TemplateString()
        StringH = TemplateString()
        StringIdf = TemplateString()
        GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
        #
        # AutoGen.c is generated if there are library classes in inf, or there are object files
        #
        if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
                                    or TAB_OBJECT_FILE in self.FileTypes):
            AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenC)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(AutoGenH) != "":
            AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(StringH) != "":
            AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        # UNI string package binary goes to the output dir and is flagged binary.
        if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = UniStringBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if UniStringBinBuffer is not None:
            UniStringBinBuffer.close()
        if str(StringIdf) != "":
            AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringIdf)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        # IDF image binary likewise goes to the output dir as a binary file.
        if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if IdfGenBinBuffer is not None:
            IdfGenBinBuffer.close()
        return RetVal
998
999 ## Return the list of library modules explicitly or implicitly used by this module
1000 @cached_property
1001 def DependentLibraryList(self):
1002 # only merge library classes and PCD for non-library module
1003 if self.IsLibrary:
1004 return []
1005 return self.PlatformInfo.ApplyLibraryInstance(self.Module)
1006
1007 ## Get the list of PCDs from current module
1008 #
1009 # @retval list The list of PCD
1010 #
1011 @cached_property
1012 def ModulePcdList(self):
1013 # apply PCD settings from platform
1014 RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
1015
1016 return RetVal
1017 @cached_property
1018 def _PcdComments(self):
1019 ReVal = OrderedListDict()
1020 ExtendCopyDictionaryLists(ReVal, self.Module.PcdComments)
1021 if not self.IsLibrary:
1022 for Library in self.DependentLibraryList:
1023 ExtendCopyDictionaryLists(ReVal, Library.PcdComments)
1024 return ReVal
1025
1026 ## Get the list of PCDs from dependent libraries
1027 #
1028 # @retval list The list of PCD
1029 #
1030 @cached_property
1031 def LibraryPcdList(self):
1032 if self.IsLibrary:
1033 return []
1034 RetVal = []
1035 Pcds = set()
1036 # get PCDs from dependent libraries
1037 for Library in self.DependentLibraryList:
1038 PcdsInLibrary = OrderedDict()
1039 for Key in Library.Pcds:
1040 # skip duplicated PCDs
1041 if Key in self.Module.Pcds or Key in Pcds:
1042 continue
1043 Pcds.add(Key)
1044 PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
1045 RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))
1046 return RetVal
1047
1048 ## Get the GUID value mapping
1049 #
1050 # @retval dict The mapping between GUID cname and its value
1051 #
1052 @cached_property
1053 def GuidList(self):
1054 RetVal = self.Module.Guids
1055 for Library in self.DependentLibraryList:
1056 RetVal.update(Library.Guids)
1057 ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)
1058 ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)
1059 return RetVal
1060
1061 @cached_property
1062 def GetGuidsUsedByPcd(self):
1063 RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())
1064 for Library in self.DependentLibraryList:
1065 RetVal.update(Library.GetGuidsUsedByPcd())
1066 return RetVal
1067 ## Get the protocol value mapping
1068 #
1069 # @retval dict The mapping between protocol cname and its value
1070 #
1071 @cached_property
1072 def ProtocolList(self):
1073 RetVal = OrderedDict(self.Module.Protocols)
1074 for Library in self.DependentLibraryList:
1075 RetVal.update(Library.Protocols)
1076 ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)
1077 ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)
1078 return RetVal
1079
1080 ## Get the PPI value mapping
1081 #
1082 # @retval dict The mapping between PPI cname and its value
1083 #
1084 @cached_property
1085 def PpiList(self):
1086 RetVal = OrderedDict(self.Module.Ppis)
1087 for Library in self.DependentLibraryList:
1088 RetVal.update(Library.Ppis)
1089 ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)
1090 ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)
1091 return RetVal
1092
1093 ## Get the list of include search path
1094 #
1095 # @retval list The list path
1096 #
1097 @cached_property
1098 def IncludePathList(self):
1099 RetVal = []
1100 RetVal.append(self.MetaFile.Dir)
1101 RetVal.append(self.DebugDir)
1102
1103 for Package in self.Module.Packages:
1104 PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
1105 if PackageDir not in RetVal:
1106 RetVal.append(PackageDir)
1107 IncludesList = Package.Includes
1108 if Package._PrivateIncludes:
1109 if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
1110 IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
1111 for Inc in IncludesList:
1112 if Inc not in RetVal:
1113 RetVal.append(str(Inc))
1114 return RetVal
1115
1116 @cached_property
1117 def IncludePathLength(self):
1118 return sum(len(inc)+1 for inc in self.IncludePathList)
1119
1120 ## Get the list of include paths from the packages
1121 #
1122 # @IncludesList list The list path
1123 #
1124 @cached_property
1125 def PackageIncludePathList(self):
1126 IncludesList = []
1127 for Package in self.Module.Packages:
1128 PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
1129 IncludesList = Package.Includes
1130 if Package._PrivateIncludes:
1131 if not self.MetaFile.Path.startswith(PackageDir):
1132 IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
1133 return IncludesList
1134
    ## Get HII EX PCDs which maybe used by VFR
    #
    #  efivarstore used by VFR may relate with HII EX PCDs
    #  Get the variable name and GUID from efivarstore and HII EX PCD
    #  List the HII EX PCDs in As Built INF if both name and GUID match.
    #
    #  @retval list HII EX PCDs
    #
    def _GetPcdsMaybeUsedByVfr(self):
        """Scan preprocessed VFR files for efivarstore statements and return
        the platform's DynamicExHii PCDs whose variable name and GUID match."""
        if not self.SourceFileList:
            return []

        # Collect (variable-name byte array, GUID string) pairs from every
        # preprocessed VFR (.i) file in the output directory.
        NameGuids = set()
        for SrcFile in self.SourceFileList:
            if SrcFile.Ext.lower() != '.vfr':
                continue
            Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
            if not os.path.exists(Vfri):
                continue
            VfriFile = open(Vfri, 'r')
            Content = VfriFile.read()
            VfriFile.close()
            Pos = Content.find('efivarstore')
            while Pos != -1:
                #
                # Make sure 'efivarstore' is the start of efivarstore statement
                # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
                #
                Index = Pos - 1
                while Index >= 0 and Content[Index] in ' \t\r\n':
                    Index -= 1
                # Only accept the keyword when preceded by ';' (statement start).
                if Index >= 0 and Content[Index] != ';':
                    Pos = Content.find('efivarstore', Pos + len('efivarstore'))
                    continue
                #
                # 'efivarstore' must be followed by name and guid
                #
                Name = gEfiVarStoreNamePattern.search(Content, Pos)
                if not Name:
                    break
                Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
                if not Guid:
                    break
                # Encode as a UCS-2 byte array so it compares equal to the
                # PCD variable-name encoding produced below.
                NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')
                NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
                Pos = Content.find('efivarstore', Name.end())
        if not NameGuids:
            return []
        # Match collected (name, guid) pairs against the platform's
        # DynamicExHii PCD sku variable stores.
        HiiExPcds = []
        for Pcd in self.PlatformInfo.Pcds.values():
            if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
                continue
            for SkuInfo in Pcd.SkuInfoList.values():
                Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
                if not Value:
                    continue
                Name = _ConvertStringToByteArray(SkuInfo.VariableName)
                Guid = GuidStructureStringToGuidString(Value)
                if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
                    HiiExPcds.append(Pcd)
                    break

        return HiiExPcds
1198
    def _GenOffsetBin(self):
        """Generate <Name>Offset.bin recording the offsets of the VFR binary
        and UNI string package inside the module's PE32+/TE image.

        Resolves symbol offsets through the module's .map/.efi files and
        writes a sequence of (16-byte GUID, 8-byte little-endian offset)
        records. Returns the output file name, or None when the module has
        no VFR/UNI sources or no offsets could be found.
        """
        VfrUniBaseName = {}
        for SourceFile in self.Module.Sources:
            if SourceFile.Type.upper() == ".VFR" :
                #
                # search the .map file to find the offset of vfr binary in the PE32+/TE file.
                #
                VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
            elif SourceFile.Type.upper() == ".UNI" :
                #
                # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
                #
                VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")

        if not VfrUniBaseName:
            return None
        MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
        EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
        VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
        if not VfrUniOffsetList:
            return None

        OutputName = '%sOffset.bin' % self.Name
        UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)

        try:
            # unbuffered binary read/write handle
            fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
        except:
            EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)

        # Use a instance of BytesIO to cache data
        fStringIO = BytesIO()

        for Item in VfrUniOffsetList:
            if (Item[0].find("Strings") != -1):
                #
                # UNI offset in image.
                # GUID + Offset
                # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
                #
                UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
                fStringIO.write(UniGuid)
                UniValue = pack ('Q', int (Item[1], 16))
                fStringIO.write (UniValue)
            else:
                #
                # VFR binary offset in image.
                # GUID + Offset
                # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
                #
                VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
                fStringIO.write(VfrGuid)
                VfrValue = pack ('Q', int (Item[1], 16))
                fStringIO.write (VfrValue)
        #
        # write data into file.
        #
        try :
            fInputfile.write (fStringIO.getvalue())
        except:
            EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
                            "file been locked or using by other applications." %UniVfrOffsetFileName, None)

        fStringIO.close ()
        fInputfile.close ()
        return OutputName
1265 @cached_property
1266 def OutputFile(self):
1267 retVal = set()
1268 OutputDir = self.OutputDir.replace('\\', '/').strip('/')
1269 DebugDir = self.DebugDir.replace('\\', '/').strip('/')
1270 for Item in self.CodaTargetList:
1271 File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
1272 retVal.add(File)
1273 if self.DepexGenerated:
1274 retVal.add(self.Name + '.depex')
1275
1276 Bin = self._GenOffsetBin()
1277 if Bin:
1278 retVal.add(Bin)
1279
1280 for Root, Dirs, Files in os.walk(OutputDir):
1281 for File in Files:
1282 if File.lower().endswith('.pdb'):
1283 retVal.add(File)
1284
1285 return retVal
1286
    ## Create AsBuilt INF file the module
    #
    def CreateAsBuiltInf(self):
        """Generate the module's "As Built" INF in the output directory.

        The As Built INF describes the binary build result — binaries,
        patchable and DynamicEx PCDs, the protocols/PPIs/GUIDs actually
        referenced, build flags and library instances — so the module can
        later be consumed as a pre-built binary module. Generated once per
        build, and only for non-library source modules that did not already
        ship binaries.
        """

        if self.IsAsBuiltInfCreated:
            return

        # Skip INF file generation for libraries
        if self.IsLibrary:
            return

        # Skip the following code for modules with no source files
        if not self.SourceFileList:
            return

        # Skip the following code for modules without any binary files
        if self.BinaryFileList:
            return

        ### TODO: How to handles mixed source and binary modules

        # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
        # Also find all packages that the DynamicEx PCDs depend on
        Pcds = []
        PatchablePcds = []
        Packages = []
        PcdCheckList = []
        PcdTokenSpaceList = []
        for Pcd in self.ModulePcdList + self.LibraryPcdList:
            if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
                PatchablePcds.append(Pcd)
                PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
            elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
                if Pcd not in Pcds:
                    Pcds.append(Pcd)
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
                    PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
        GuidList = OrderedDict(self.GuidList)
        for TokenSpace in self.GetGuidsUsedByPcd:
            # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
            # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
            if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
                GuidList.pop(TokenSpace)
        CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
        # Keep only the packages that declare at least one referenced
        # GUID/PPI/protocol/PCD.
        for Package in self.DerivedPackageList:
            if Package in Packages:
                continue
            BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
            Found = False
            for Index in range(len(BeChecked)):
                for Item in CheckList[Index]:
                    if Item in BeChecked[Index]:
                        Packages.append(Package)
                        Found = True
                        break
                if Found:
                    break

        # HII EX PCDs referenced from VFR efivarstore statements also pull
        # in the packages that declare them.
        VfrPcds = self._GetPcdsMaybeUsedByVfr()
        for Pkg in self.PlatformInfo.PackageList:
            if Pkg in Packages:
                continue
            for VfrPcd in VfrPcds:
                if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
                    (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
                    Packages.append(Pkg)
                    break

        # A UEFI driver that generated a depex is published as DXE_DRIVER.
        ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
        DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
        Guid = self.Guid
        MDefs = self.Module.Defines

        # Substitution dictionary for the gAsBuiltInfHeaderString template.
        AsBuiltInfDict = {
            'module_name' : self.Name,
            'module_guid' : Guid,
            'module_module_type' : ModuleType,
            'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
            'pcd_is_driver_string' : [],
            'module_uefi_specification_version' : [],
            'module_pi_specification_version' : [],
            'module_entry_point' : self.Module.ModuleEntryPointList,
            'module_unload_image' : self.Module.ModuleUnloadImageList,
            'module_constructor' : self.Module.ConstructorList,
            'module_destructor' : self.Module.DestructorList,
            'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
            'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
            'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
            'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
            'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
            'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
            'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
            'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
            'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
            'module_arch' : self.Arch,
            'package_item' : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
            'binary_item' : [],
            'patchablepcd_item' : [],
            'pcd_item' : [],
            'protocol_item' : [],
            'ppi_item' : [],
            'guid_item' : [],
            'flags_item' : [],
            'libraryclasses_item' : []
        }

        if 'MODULE_UNI_FILE' in MDefs:
            UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
            if os.path.isfile(UNIFile):
                shutil.copy2(UNIFile, self.OutputDir)

        if self.AutoGenVersion > int(gInfSpecVersion, 0):
            AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
        else:
            AsBuiltInfDict['module_inf_version'] = gInfSpecVersion

        if DriverType:
            AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)

        if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
        if 'PI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])

        # Translate each coda target into a [Binaries] entry, classified by
        # file extension; paths are made relative to the output/debug dirs.
        OutputDir = self.OutputDir.replace('\\', '/').strip('/')
        DebugDir = self.DebugDir.replace('\\', '/').strip('/')
        for Item in self.CodaTargetList:
            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
            if os.path.isabs(File):
                File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
            if Item.Target.Ext.lower() == '.aml':
                AsBuiltInfDict['binary_item'].append('ASL|' + File)
            elif Item.Target.Ext.lower() == '.acpi':
                AsBuiltInfDict['binary_item'].append('ACPI|' + File)
            elif Item.Target.Ext.lower() == '.efi':
                AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
            else:
                AsBuiltInfDict['binary_item'].append('BIN|' + File)
        # A .depex file already on disk counts even when generation was not
        # flagged earlier in this build.
        if not self.DepexGenerated:
            DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
            if os.path.exists(DepexFile):
                self.DepexGenerated = True
        if self.DepexGenerated:
            if self.ModuleType in [SUP_MODULE_PEIM]:
                AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
                AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
                AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')

        Bin = self._GenOffsetBin()
        if Bin:
            AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)

        for Root, Dirs, Files in os.walk(OutputDir):
            for File in Files:
                if File.lower().endswith('.pdb'):
                    AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
        HeaderComments = self.Module.HeaderComments
        StartPos = 0
        # An '@BinaryHeader' marker replaces '@file' as the header start.
        for Index in range(len(HeaderComments)):
            if HeaderComments[Index].find('@BinaryHeader') != -1:
                HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
                StartPos = Index
                break
        AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
        AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)

        # Emit protocol/PPI/GUID sections with their usage comments attached.
        GenList = [
            (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
            (self.PpiList, self._PpiComments, 'ppi_item'),
            (GuidList, self._GuidComments, 'guid_item')
        ]
        for Item in GenList:
            for CName in Item[0]:
                Comments = '\n '.join(Item[1][CName]) if CName in Item[1] else ''
                Entry = Comments + '\n ' + CName if Comments else CName
                AsBuiltInfDict[Item[2]].append(Entry)
        PatchList = parsePcdInfoFromMapFile(
                            os.path.join(self.OutputDir, self.Name + '.map'),
                            os.path.join(self.OutputDir, self.Name + '.efi')
                        )
        if PatchList:
            for Pcd in PatchablePcds:
                TokenCName = Pcd.TokenCName
                for PcdItem in GlobalData.MixedPcd:
                    if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                        TokenCName = PcdItem[0]
                        break
                # Only patchable PCDs that appear in the map file are emitted.
                for PatchPcd in PatchList:
                    if TokenCName == PatchPcd[0]:
                        break
                else:
                    continue
                PcdValue = ''
                if Pcd.DatumType == 'BOOLEAN':
                    BoolValue = Pcd.DefaultValue.upper()
                    if BoolValue == 'TRUE':
                        Pcd.DefaultValue = '1'
                    elif BoolValue == 'FALSE':
                        Pcd.DefaultValue = '0'

                if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
                    HexFormat = '0x%02x'
                    if Pcd.DatumType == TAB_UINT16:
                        HexFormat = '0x%04x'
                    elif Pcd.DatumType == TAB_UINT32:
                        HexFormat = '0x%08x'
                    elif Pcd.DatumType == TAB_UINT64:
                        HexFormat = '0x%016x'
                    PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
                else:
                    # VOID* PCD: expand the default value into a padded byte
                    # array of MaxDatumSize bytes.
                    if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
                        EdkLogger.error("build", AUTOGEN_ERROR,
                                        "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
                                        )
                    ArraySize = int(Pcd.MaxDatumSize, 0)
                    PcdValue = Pcd.DefaultValue
                    if PcdValue[0] != '{':
                        Unicode = False
                        if PcdValue[0] == 'L':
                            Unicode = True
                        PcdValue = PcdValue.lstrip('L')
                        # NOTE(review): eval of the INF-provided string literal;
                        # input comes from build metadata, not end users.
                        PcdValue = eval(PcdValue)
                        NewValue = '{'
                        for Index in range(0, len(PcdValue)):
                            if Unicode:
                                CharVal = ord(PcdValue[Index])
                                NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
                                        + '0x%02x' % (CharVal >> 8) + ', '
                            else:
                                NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
                        Padding = '0x00, '
                        if Unicode:
                            Padding = Padding * 2
                            ArraySize = ArraySize // 2
                        if ArraySize < (len(PcdValue) + 1):
                            if Pcd.MaxSizeUserSet:
                                EdkLogger.error("build", AUTOGEN_ERROR,
                                                "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                                )
                            else:
                                ArraySize = len(PcdValue) + 1
                        if ArraySize > len(PcdValue) + 1:
                            NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
                        PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
                    elif len(PcdValue.split(',')) <= ArraySize:
                        PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
                        PcdValue += '}'
                    else:
                        if Pcd.MaxSizeUserSet:
                            EdkLogger.error("build", AUTOGEN_ERROR,
                                            "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                            )
                        else:
                            ArraySize = len(PcdValue) + 1
                PcdItem = '%s.%s|%s|0x%X' % \
                    (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
                PcdComments = ''
                if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                    PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
                if PcdComments:
                    PcdItem = PcdComments + '\n ' + PcdItem
                AsBuiltInfDict['patchablepcd_item'].append(PcdItem)

        # DynamicEx and VFR-referenced PCDs, annotated with HII variable info.
        for Pcd in Pcds + VfrPcds:
            PcdCommentList = []
            HiiInfo = ''
            TokenCName = Pcd.TokenCName
            for PcdItem in GlobalData.MixedPcd:
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                    TokenCName = PcdItem[0]
                    break
            if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
                for SkuName in Pcd.SkuInfoList:
                    SkuInfo = Pcd.SkuInfoList[SkuName]
                    HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
                    break
            if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
            if HiiInfo:
                # Fold the HII variable info into an existing usage comment,
                # or append a new UNDEFINED usage line.
                UsageIndex = -1
                UsageStr = ''
                for Index, Comment in enumerate(PcdCommentList):
                    for Usage in UsageList:
                        if Comment.find(Usage) != -1:
                            UsageStr = Usage
                            UsageIndex = Index
                            break
                if UsageIndex != -1:
                    PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
                else:
                    PcdCommentList.append('## UNDEFINED ' + HiiInfo)
            PcdComments = '\n '.join(PcdCommentList)
            PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
            if PcdComments:
                PcdEntry = PcdComments + '\n ' + PcdEntry
            AsBuiltInfDict['pcd_item'].append(PcdEntry)
        for Item in self.BuildOption:
            if 'FLAGS' in self.BuildOption[Item]:
                AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))

        # Generated LibraryClasses section in comments.
        for Library in self.LibraryAutoGenList:
            AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))

        # Generated UserExtensions TianoCore section.
        # All tianocore user extensions are copied.
        UserExtStr = ''
        for TianoCore in self._GetTianoCoreUserExtensionList():
            UserExtStr += '\n'.join(TianoCore)
            ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
            if os.path.isfile(ExtensionFile):
                shutil.copy2(ExtensionFile, self.OutputDir)
        AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr

        # Generated depex expression section in comments.
        DepexExpression = self._GetDepexExpresionString()
        AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''

        AsBuiltInf = TemplateString()
        AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))

        SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)

        self.IsAsBuiltInfCreated = True
1614
1615 def CacheCopyFile(self, OriginDir, CopyDir, File):
1616 sub_dir = os.path.relpath(File, CopyDir)
1617 destination_file = os.path.join(OriginDir, sub_dir)
1618 destination_dir = os.path.dirname(destination_file)
1619 CreateDirectory(destination_dir)
1620 try:
1621 CopyFileOnChange(File, destination_dir)
1622 except:
1623 EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))
1624 return
1625
    def CopyModuleToCache(self):
        """Publish this module's build outputs into the binary cache.

        Generates the pre-makefile and makefile hashes, saves the hash-chain
        files, then copies the As Built INF and every output file into the
        hash-named cache directory.

        Returns False when a hash cannot be generated; falls off the end
        (None, falsy) on success — callers must not test for a True result.
        """
        self.GenPreMakefileHash(GlobalData.gCacheIR)
        if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
            return False

        self.GenMakeHash(GlobalData.gCacheIR)
        if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashChain or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
            return False

        # Cache path: <dest>/<platform-out>/<target>_<toolchain>/<arch>/<src>/<module>/<MakeHash>
        MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)
        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName, MakeHashStr)
        # NOTE(review): FfsDir is computed but never used in this method —
        # confirm whether Ffs output caching was intended here.
        FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name, MakeHashStr)

        CreateDirectory (FileDir)
        self.SaveHashChainFileToCache(GlobalData.gCacheIR)
        ModuleFile = path.join(self.OutputDir, self.Name + '.inf')
        if os.path.exists(ModuleFile):
            CopyFileOnChange(ModuleFile, FileDir)
        # Fall back to the build database's binary list when OutputFile is empty.
        if not self.OutputFile:
            Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
            self.OutputFile = Ma.Binaries
        for File in self.OutputFile:
            File = str(File)
            if not os.path.isabs(File):
                File = os.path.join(self.OutputDir, File)
            if os.path.exists(File):
                # Preserve the output-dir-relative path inside the cache dir.
                sub_dir = os.path.relpath(File, self.OutputDir)
                destination_file = os.path.join(FileDir, sub_dir)
                destination_dir = os.path.dirname(destination_file)
                CreateDirectory(destination_dir)
                CopyFileOnChange(File, destination_dir)
1662
1663 def SaveHashChainFileToCache(self, gDict):
1664 if not GlobalData.gBinCacheDest:
1665 return False
1666
1667 self.GenPreMakefileHash(gDict)
1668 if not (self.MetaFile.Path, self.Arch) in gDict or \
1669 not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
1670 EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
1671 return False
1672
1673 self.GenMakeHash(gDict)
1674 if not (self.MetaFile.Path, self.Arch) in gDict or \
1675 not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain or \
1676 not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
1677 EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
1678 return False
1679
1680 # save the hash chain list as cache file
1681 MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)
1682 CacheDestDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
1683 CacheHashDestDir = path.join(CacheDestDir, MakeHashStr)
1684 ModuleHashPair = path.join(CacheDestDir, self.Name + ".ModuleHashPair")
1685 MakeHashChain = path.join(CacheHashDestDir, self.Name + ".MakeHashChain")
1686 ModuleFilesChain = path.join(CacheHashDestDir, self.Name + ".ModuleFilesChain")
1687
1688 # save the HashChainDict as json file
1689 CreateDirectory (CacheDestDir)
1690 CreateDirectory (CacheHashDestDir)
1691 try:
1692 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
1693 if os.path.exists(ModuleHashPair):
1694 f = open(ModuleHashPair, 'r')
1695 ModuleHashPairList = json.load(f)
1696 f.close()
1697 PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
1698 MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
1699 ModuleHashPairList.append((PreMakeHash, MakeHash))
1700 ModuleHashPairList = list(set(map(tuple, ModuleHashPairList)))
1701 with open(ModuleHashPair, 'w') as f:
1702 json.dump(ModuleHashPairList, f, indent=2)
1703 except:
1704 EdkLogger.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair)
1705 return False
1706
1707 try:
1708 with open(MakeHashChain, 'w') as f:
1709 json.dump(gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain, f, indent=2)
1710 except:
1711 EdkLogger.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain)
1712 return False
1713
1714 try:
1715 with open(ModuleFilesChain, 'w') as f:
1716 json.dump(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain, f, indent=2)
1717 except:
1718 EdkLogger.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain)
1719 return False
1720
1721 # save the autogenfile and makefile for debug usage
1722 CacheDebugDir = path.join(CacheHashDestDir, "CacheDebug")
1723 CreateDirectory (CacheDebugDir)
1724 CopyFileOnChange(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath, CacheDebugDir)
1725 if gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:
1726 for File in gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:
1727 CopyFileOnChange(str(File), CacheDebugDir)
1728
1729 return True
1730
1731 ## Create makefile for the module and its dependent libraries
1732 #
1733 # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of
1734 # dependent libraries will be created
1735 #
1736 @cached_class_function
1737 def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):
1738 gDict = GlobalData.gCacheIR
1739 if (self.MetaFile.Path, self.Arch) in gDict and \
1740 gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
1741 return
1742
1743 # nest this function inside it's only caller.
1744 def CreateTimeStamp():
1745 FileSet = {self.MetaFile.Path}
1746
1747 for SourceFile in self.Module.Sources:
1748 FileSet.add (SourceFile.Path)
1749
1750 for Lib in self.DependentLibraryList:
1751 FileSet.add (Lib.MetaFile.Path)
1752
1753 for f in self.AutoGenDepSet:
1754 FileSet.add (f.Path)
1755
1756 if os.path.exists (self.TimeStampPath):
1757 os.remove (self.TimeStampPath)
1758 with open(self.TimeStampPath, 'w+') as fd:
1759 for f in FileSet:
1760 fd.write(f)
1761 fd.write("\n")
1762
1763 # Ignore generating makefile when it is a binary module
1764 if self.IsBinaryModule:
1765 return
1766
1767 self.GenFfsList = GenFfsList
1768
1769 if not self.IsLibrary and CreateLibraryMakeFile:
1770 for LibraryAutoGen in self.LibraryAutoGenList:
1771 LibraryAutoGen.CreateMakeFile()
1772
1773 # CanSkip uses timestamps to determine build skipping
1774 if self.CanSkip():
1775 return
1776
1777 if len(self.CustomMakefile) == 0:
1778 Makefile = GenMake.ModuleMakefile(self)
1779 else:
1780 Makefile = GenMake.CustomMakefile(self)
1781 if Makefile.Generate():
1782 EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
1783 (self.Name, self.Arch))
1784 else:
1785 EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
1786 (self.Name, self.Arch))
1787
1788 CreateTimeStamp()
1789
1790 MakefileType = Makefile._FileType
1791 MakefileName = Makefile._FILE_NAME_[MakefileType]
1792 MakefilePath = os.path.join(self.MakeFileDir, MakefileName)
1793
1794 MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
1795 MewIR.MakefilePath = MakefilePath
1796 MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet
1797 MewIR.CreateMakeFileDone = True
1798 with GlobalData.file_lock:
1799 try:
1800 IR = gDict[(self.MetaFile.Path, self.Arch)]
1801 IR.MakefilePath = MakefilePath
1802 IR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet
1803 IR.CreateMakeFileDone = True
1804 gDict[(self.MetaFile.Path, self.Arch)] = IR
1805 except:
1806 gDict[(self.MetaFile.Path, self.Arch)] = MewIR
1807
1808 def CopyBinaryFiles(self):
1809 for File in self.Module.Binaries:
1810 SrcPath = File.Path
1811 DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))
1812 CopyLongFilePath(SrcPath, DstPath)
    ## Create autogen code for the module and its dependent libraries
    #
    #   @param      CreateLibraryCodeFile   Flag indicating if or not the code of
    #                                       dependent libraries will be created
    #   @retval     list of generated AutoGen file names, or None when skipped
    #
    def CreateCodeFile(self, CreateLibraryCodeFile=True):
        gDict = GlobalData.gCacheIR
        # Skip if a previous pass already generated this module's code files
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:
            return

        if self.IsCodeFileCreated:
            return

        # Need to generate PcdDatabase even PcdDriver is binarymodule
        if self.IsBinaryModule and self.PcdIsDriver != '':
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
            return
        if self.IsBinaryModule:
            # Binary libraries only need their binaries copied to the output dir
            if self.IsLibrary:
                self.CopyBinaryFiles()
            return

        if not self.IsLibrary and CreateLibraryCodeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateCodeFile()

        # CanSkip uses timestamps to determine build skipping
        if self.CanSkip():
            return

        AutoGenList = []
        IgoredAutoGenList = []  # NOTE(review): historic misspelling of "Ignored", kept for stability

        for File in self.AutoGenFileList:
            # GenC.Generate returns True only when the file content changed and
            # was (re)written; unchanged files are reported as skipped below
            if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
                AutoGenList.append(str(File))
            else:
                IgoredAutoGenList.append(str(File))


        for ModuleType in self.DepexList:
            # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
            if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
                continue

            Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
            DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}

            if len(Dpx.PostfixNotation) != 0:
                self.DepexGenerated = True

            if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
                AutoGenList.append(str(DpxFile))
            else:
                IgoredAutoGenList.append(str(DpxFile))

        # Report what was generated vs skipped at debug verbosity
        if IgoredAutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
                            (" ".join(AutoGenList), self.Name, self.Arch))
        elif AutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
                            (" ".join(IgoredAutoGenList), self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
                            (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))

        self.IsCodeFileCreated = True
        # Record completion in the shared cache IR under the lock; the bare
        # except falls back to inserting a fresh IR when no entry exists yet
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.CreateCodeFileDone = True
        with GlobalData.file_lock:
            try:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.CreateCodeFileDone = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR

        return AutoGenList
1892
1893 ## Summarize the ModuleAutoGen objects of all libraries used by this module
1894 @cached_property
1895 def LibraryAutoGenList(self):
1896 RetVal = []
1897 for Library in self.DependentLibraryList:
1898 La = ModuleAutoGen(
1899 self.Workspace,
1900 Library.MetaFile,
1901 self.BuildTarget,
1902 self.ToolChain,
1903 self.Arch,
1904 self.PlatformInfo.MetaFile,
1905 self.DataPipe
1906 )
1907 La.IsLibrary = True
1908 if La not in RetVal:
1909 RetVal.append(La)
1910 for Lib in La.CodaTargetList:
1911 self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
1912 return RetVal
1913
1914 def GenModuleHash(self):
1915 # Initialize a dictionary for each arch type
1916 if self.Arch not in GlobalData.gModuleHash:
1917 GlobalData.gModuleHash[self.Arch] = {}
1918
1919 # Early exit if module or library has been hashed and is in memory
1920 if self.Name in GlobalData.gModuleHash[self.Arch]:
1921 return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
1922
1923 # Initialze hash object
1924 m = hashlib.md5()
1925
1926 # Add Platform level hash
1927 m.update(GlobalData.gPlatformHash.encode('utf-8'))
1928
1929 # Add Package level hash
1930 if self.DependentPackageList:
1931 for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
1932 if Pkg.PackageName in GlobalData.gPackageHash:
1933 m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))
1934
1935 # Add Library hash
1936 if self.LibraryAutoGenList:
1937 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
1938 if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
1939 Lib.GenModuleHash()
1940 m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
1941
1942 # Add Module self
1943 f = open(str(self.MetaFile), 'rb')
1944 Content = f.read()
1945 f.close()
1946 m.update(Content)
1947
1948 # Add Module's source files
1949 if self.SourceFileList:
1950 for File in sorted(self.SourceFileList, key=lambda x: str(x)):
1951 f = open(str(File), 'rb')
1952 Content = f.read()
1953 f.close()
1954 m.update(Content)
1955
1956 GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()
1957
1958 return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
1959
1960 def GenModuleFilesHash(self, gDict):
1961 # Early exit if module or library has been hashed and is in memory
1962 if (self.MetaFile.Path, self.Arch) in gDict:
1963 if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:
1964 return gDict[(self.MetaFile.Path, self.Arch)]
1965
1966 DependencyFileSet = set()
1967 # Add Module Meta file
1968 DependencyFileSet.add(self.MetaFile)
1969
1970 # Add Module's source files
1971 if self.SourceFileList:
1972 for File in set(self.SourceFileList):
1973 DependencyFileSet.add(File)
1974
1975 # Add modules's include header files
1976 # Search dependency file list for each source file
1977 SourceFileList = []
1978 OutPutFileList = []
1979 for Target in self.IntroTargetList:
1980 SourceFileList.extend(Target.Inputs)
1981 OutPutFileList.extend(Target.Outputs)
1982 if OutPutFileList:
1983 for Item in OutPutFileList:
1984 if Item in SourceFileList:
1985 SourceFileList.remove(Item)
1986 SearchList = []
1987 for file_path in self.IncludePathList + self.BuildOptionIncPathList:
1988 # skip the folders in platform BuildDir which are not been generated yet
1989 if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):
1990 continue
1991 SearchList.append(file_path)
1992 FileDependencyDict = {}
1993 ForceIncludedFile = []
1994 for F in SourceFileList:
1995 # skip the files which are not been generated yet, because
1996 # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c
1997 if not os.path.exists(F.Path):
1998 continue
1999 FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)
2000
2001 if FileDependencyDict:
2002 for Dependency in FileDependencyDict.values():
2003 DependencyFileSet.update(set(Dependency))
2004
2005 # Caculate all above dependency files hash
2006 # Initialze hash object
2007 FileList = []
2008 m = hashlib.md5()
2009 for File in sorted(DependencyFileSet, key=lambda x: str(x)):
2010 if not os.path.exists(str(File)):
2011 EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
2012 continue
2013 f = open(str(File), 'rb')
2014 Content = f.read()
2015 f.close()
2016 m.update(Content)
2017 FileList.append((str(File), hashlib.md5(Content).hexdigest()))
2018
2019
2020 MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
2021 MewIR.ModuleFilesHashDigest = m.digest()
2022 MewIR.ModuleFilesHashHexDigest = m.hexdigest()
2023 MewIR.ModuleFilesChain = FileList
2024 with GlobalData.file_lock:
2025 try:
2026 IR = gDict[(self.MetaFile.Path, self.Arch)]
2027 IR.ModuleFilesHashDigest = m.digest()
2028 IR.ModuleFilesHashHexDigest = m.hexdigest()
2029 IR.ModuleFilesChain = FileList
2030 gDict[(self.MetaFile.Path, self.Arch)] = IR
2031 except:
2032 gDict[(self.MetaFile.Path, self.Arch)] = MewIR
2033
2034 return gDict[(self.MetaFile.Path, self.Arch)]
2035
    ## Compute the pre-makefile hash for this module: platform hash + package
    #  hashes + library module-files hashes + this module's own files hash,
    #  and publish the hex digest into the shared cache IR dict.
    #
    #   @param      gDict   Shared cache IR dictionary keyed by (INF path, arch)
    #   @retval     ModuleBuildCacheIR entry for this module, or None on failure
    #
    def GenPreMakefileHash(self, gDict):
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            return gDict[(self.MetaFile.Path, self.Arch)]

        # skip binary module
        if self.IsBinaryModule:
            return

        # Ensure the per-module files hash exists before combining it below
        if not (self.MetaFile.Path, self.Arch) in gDict or \
          not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            self.GenModuleFilesHash(gDict)

        if not (self.MetaFile.Path, self.Arch) in gDict or \
          not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return

        # Initialze hash object
        m = hashlib.md5()

        # Add Platform level hash.
        # NOTE(review): ('PlatformHash') is just the plain string key
        # 'PlatformHash' — the parentheses are redundant, not a tuple.
        if ('PlatformHash') in gDict:
            m.update(gDict[('PlatformHash')].encode('utf-8'))
        else:
            EdkLogger.quiet("[cache warning]: PlatformHash is missing")

        # Add Package level hash (sorted by name for a deterministic digest)
        if self.DependentPackageList:
            for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
                if (Pkg.PackageName, 'PackageHash') in gDict:
                    m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))
                else:
                    EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg.PackageName, self.MetaFile.Name, self.Arch))

        # Add Library hash, recursing into libraries that are not hashed yet
        if self.LibraryAutoGenList:
            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
                  not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:
                    Lib.GenPreMakefileHash(gDict)
                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)

        # Add Module self
        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)

        # Publish the result into the shared IR under the file lock
        with GlobalData.file_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.PreMakefileHashHexDigest = m.hexdigest()
            gDict[(self.MetaFile.Path, self.Arch)] = IR

        return gDict[(self.MetaFile.Path, self.Arch)]
2089
2090 def GenMakeHeaderFilesHash(self, gDict):
2091 # Early exit if module or library has been hashed and is in memory
2092 if (self.MetaFile.Path, self.Arch) in gDict and \
2093 gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
2094 return gDict[(self.MetaFile.Path, self.Arch)]
2095
2096 # skip binary module
2097 if self.IsBinaryModule:
2098 return
2099
2100 if not (self.MetaFile.Path, self.Arch) in gDict or \
2101 not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:
2102 if self.IsLibrary:
2103 if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.libConstPcd:
2104 self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]
2105 if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.Refes:
2106 self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]
2107 self.CreateCodeFile()
2108 if not (self.MetaFile.Path, self.Arch) in gDict or \
2109 not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
2110 self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.File, self.Arch),[]))
2111
2112 if not (self.MetaFile.Path, self.Arch) in gDict or \
2113 not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \
2114 not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
2115 EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2116 return
2117
2118 DependencyFileSet = set()
2119 # Add Makefile
2120 if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:
2121 DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)
2122 else:
2123 EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2124
2125 # Add header files
2126 if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:
2127 for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:
2128 DependencyFileSet.add(File)
2129 else:
2130 EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2131
2132 # Add AutoGen files
2133 if self.AutoGenFileList:
2134 for File in set(self.AutoGenFileList):
2135 DependencyFileSet.add(File)
2136
2137 # Caculate all above dependency files hash
2138 # Initialze hash object
2139 FileList = []
2140 m = hashlib.md5()
2141 for File in sorted(DependencyFileSet, key=lambda x: str(x)):
2142 if not os.path.exists(str(File)):
2143 EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
2144 continue
2145 f = open(str(File), 'rb')
2146 Content = f.read()
2147 f.close()
2148 m.update(Content)
2149 FileList.append((str(File), hashlib.md5(Content).hexdigest()))
2150
2151 with GlobalData.file_lock:
2152 IR = gDict[(self.MetaFile.Path, self.Arch)]
2153 IR.AutoGenFileList = self.AutoGenFileList.keys()
2154 IR.MakeHeaderFilesHashChain = FileList
2155 IR.MakeHeaderFilesHashDigest = m.digest()
2156 gDict[(self.MetaFile.Path, self.Arch)] = IR
2157
2158 return gDict[(self.MetaFile.Path, self.Arch)]
2159
    ## Compute the full make-phase hash for this module (makefile/header hash +
    #  library make hashes + module files hash) and record the ordered
    #  MakeHashChain used later to pinpoint the first cache-miss file.
    #
    #   @param      gDict   Shared cache IR dictionary keyed by (INF path, arch)
    #   @retval     ModuleBuildCacheIR entry for this module, or None on failure
    #
    def GenMakeHash(self, gDict):
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
            return gDict[(self.MetaFile.Path, self.Arch)]

        # skip binary module
        if self.IsBinaryModule:
            return

        # Make sure both prerequisite digests exist before combining them
        if not (self.MetaFile.Path, self.Arch) in gDict or \
          not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            self.GenModuleFilesHash(gDict)
        if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
            self.GenMakeHeaderFilesHash(gDict)

        if not (self.MetaFile.Path, self.Arch) in gDict or \
          not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \
          not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \
          not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \
          not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:
            EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return

        # Initialze hash object
        m = hashlib.md5()
        MakeHashChain = []

        # Add hash of makefile and dependency header files
        m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)
        # Append only (file, hash) entries not already in the chain, sorted so
        # the chain order is deterministic
        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))
        New.sort(key=lambda x: str(x))
        MakeHashChain += New

        # Add Library hash, recursing into libraries not yet hashed; a library
        # that still lacks a digest is reported and skipped (best effort)
        if self.LibraryAutoGenList:
            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
                  not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:
                    Lib.GenMakeHash(gDict)
                if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:
                    print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)
                    continue
                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)
                New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))
                New.sort(key=lambda x: str(x))
                MakeHashChain += New

        # Add Module self
        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)
        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))
        New.sort(key=lambda x: str(x))
        MakeHashChain += New

        # Publish digest and chain into the shared IR under the file lock
        with GlobalData.file_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.MakeHashDigest = m.digest()
            IR.MakeHashHexDigest = m.hexdigest()
            IR.MakeHashChain = MakeHashChain
            gDict[(self.MetaFile.Path, self.Arch)] = IR

        return gDict[(self.MetaFile.Path, self.Arch)]
2222
2223 ## Decide whether we can skip the left autogen and make process
2224 def CanSkipbyPreMakefileCache(self, gDict):
2225 if not GlobalData.gBinCacheSource:
2226 return False
2227
2228 # If Module is binary, do not skip by cache
2229 if self.IsBinaryModule:
2230 return False
2231
2232 # .inc is contains binary information so do not skip by hash as well
2233 for f_ext in self.SourceFileList:
2234 if '.inc' in str(f_ext):
2235 return False
2236
2237 # Get the module hash values from stored cache and currrent build
2238 # then check whether cache hit based on the hash values
2239 # if cache hit, restore all the files from cache
2240 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2241 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
2242
2243 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2244 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
2245 if not os.path.exists(ModuleHashPair):
2246 EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
2247 return False
2248
2249 try:
2250 f = open(ModuleHashPair, 'r')
2251 ModuleHashPairList = json.load(f)
2252 f.close()
2253 except:
2254 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
2255 return False
2256
2257 self.GenPreMakefileHash(gDict)
2258 if not (self.MetaFile.Path, self.Arch) in gDict or \
2259 not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
2260 EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2261 return False
2262
2263 MakeHashStr = None
2264 CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
2265 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2266 if PreMakefileHash == CurrentPreMakeHash:
2267 MakeHashStr = str(MakeHash)
2268
2269 if not MakeHashStr:
2270 return False
2271
2272 TargetHashDir = path.join(FileDir, MakeHashStr)
2273 TargetFfsHashDir = path.join(FfsDir, MakeHashStr)
2274
2275 if not os.path.exists(TargetHashDir):
2276 EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
2277 return False
2278
2279 for root, dir, files in os.walk(TargetHashDir):
2280 for f in files:
2281 File = path.join(root, f)
2282 self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
2283 if os.path.exists(TargetFfsHashDir):
2284 for root, dir, files in os.walk(TargetFfsHashDir):
2285 for f in files:
2286 File = path.join(root, f)
2287 self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)
2288
2289 if self.Name == "PcdPeim" or self.Name == "PcdDxe":
2290 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
2291
2292 with GlobalData.file_lock:
2293 IR = gDict[(self.MetaFile.Path, self.Arch)]
2294 IR.PreMakeCacheHit = True
2295 gDict[(self.MetaFile.Path, self.Arch)] = IR
2296 print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)
2297 #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))
2298 return True
2299
2300 ## Decide whether we can skip the make process
2301 def CanSkipbyMakeCache(self, gDict):
2302 if not GlobalData.gBinCacheSource:
2303 return False
2304
2305 # If Module is binary, do not skip by cache
2306 if self.IsBinaryModule:
2307 print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)
2308 return False
2309
2310 # .inc is contains binary information so do not skip by hash as well
2311 for f_ext in self.SourceFileList:
2312 if '.inc' in str(f_ext):
2313 with GlobalData.file_lock:
2314 IR = gDict[(self.MetaFile.Path, self.Arch)]
2315 IR.MakeCacheHit = False
2316 gDict[(self.MetaFile.Path, self.Arch)] = IR
2317 print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)
2318 return False
2319
2320 # Get the module hash values from stored cache and currrent build
2321 # then check whether cache hit based on the hash values
2322 # if cache hit, restore all the files from cache
2323 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2324 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
2325
2326 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2327 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
2328 if not os.path.exists(ModuleHashPair):
2329 EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
2330 return False
2331
2332 try:
2333 f = open(ModuleHashPair, 'r')
2334 ModuleHashPairList = json.load(f)
2335 f.close()
2336 except:
2337 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
2338 return False
2339
2340 self.GenMakeHash(gDict)
2341 if not (self.MetaFile.Path, self.Arch) in gDict or \
2342 not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
2343 EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2344 return False
2345
2346 MakeHashStr = None
2347 CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
2348 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2349 if MakeHash == CurrentMakeHash:
2350 MakeHashStr = str(MakeHash)
2351
2352 if not MakeHashStr:
2353 print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
2354 return False
2355
2356 TargetHashDir = path.join(FileDir, MakeHashStr)
2357 TargetFfsHashDir = path.join(FfsDir, MakeHashStr)
2358 if not os.path.exists(TargetHashDir):
2359 EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
2360 return False
2361
2362 for root, dir, files in os.walk(TargetHashDir):
2363 for f in files:
2364 File = path.join(root, f)
2365 self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
2366
2367 if os.path.exists(TargetFfsHashDir):
2368 for root, dir, files in os.walk(TargetFfsHashDir):
2369 for f in files:
2370 File = path.join(root, f)
2371 self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)
2372
2373 if self.Name == "PcdPeim" or self.Name == "PcdDxe":
2374 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
2375 with GlobalData.file_lock:
2376 IR = gDict[(self.MetaFile.Path, self.Arch)]
2377 IR.MakeCacheHit = True
2378 gDict[(self.MetaFile.Path, self.Arch)] = IR
2379 print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
2380 return True
2381
2382 ## Show the first file name which causes cache miss
2383 def PrintFirstMakeCacheMissFile(self, gDict):
2384 if not GlobalData.gBinCacheSource:
2385 return
2386
2387 # skip binary module
2388 if self.IsBinaryModule:
2389 return
2390
2391 if not (self.MetaFile.Path, self.Arch) in gDict:
2392 return
2393
2394 # Only print cache miss file for the MakeCache not hit module
2395 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
2396 return
2397
2398 if not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
2399 EdkLogger.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self.MetaFile.Path, self.Arch))
2400 return
2401
2402 # Find the cache dir name through the .ModuleHashPair file info
2403 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2404
2405 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2406 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
2407 if not os.path.exists(ModuleHashPair):
2408 EdkLogger.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2409 return
2410
2411 try:
2412 f = open(ModuleHashPair, 'r')
2413 ModuleHashPairList = json.load(f)
2414 f.close()
2415 except:
2416 EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2417 return
2418
2419 MakeHashSet = set()
2420 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2421 TargetHashDir = path.join(FileDir, str(MakeHash))
2422 if os.path.exists(TargetHashDir):
2423 MakeHashSet.add(MakeHash)
2424 if not MakeHashSet:
2425 EdkLogger.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2426 return
2427
2428 TargetHash = list(MakeHashSet)[0]
2429 TargetHashDir = path.join(FileDir, str(TargetHash))
2430 if len(MakeHashSet) > 1 :
2431 EdkLogger.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash, self.MetaFile.Path, self.Arch))
2432
2433 ListFile = path.join(TargetHashDir, self.Name + '.MakeHashChain')
2434 if os.path.exists(ListFile):
2435 try:
2436 f = open(ListFile, 'r')
2437 CachedList = json.load(f)
2438 f.close()
2439 except:
2440 EdkLogger.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile)
2441 return
2442 else:
2443 EdkLogger.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile)
2444 return
2445
2446 CurrentList = gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain
2447 for idx, (file, hash) in enumerate (CurrentList):
2448 (filecached, hashcached) = CachedList[idx]
2449 if file != filecached:
2450 EdkLogger.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self.MetaFile.Path, self.Arch, file, filecached))
2451 break
2452 if hash != hashcached:
2453 EdkLogger.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self.MetaFile.Path, self.Arch, file))
2454 break
2455
2456 return True
2457
2458 ## Decide whether we can skip the ModuleAutoGen process
2459 def CanSkipbyCache(self, gDict):
2460 # Hashing feature is off
2461 if not GlobalData.gBinCacheSource:
2462 return False
2463
2464 if self in GlobalData.gBuildHashSkipTracking:
2465 return GlobalData.gBuildHashSkipTracking[self]
2466
2467 # If library or Module is binary do not skip by hash
2468 if self.IsBinaryModule:
2469 GlobalData.gBuildHashSkipTracking[self] = False
2470 return False
2471
2472 # .inc is contains binary information so do not skip by hash as well
2473 for f_ext in self.SourceFileList:
2474 if '.inc' in str(f_ext):
2475 GlobalData.gBuildHashSkipTracking[self] = False
2476 return False
2477
2478 if not (self.MetaFile.Path, self.Arch) in gDict:
2479 return False
2480
2481 if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:
2482 GlobalData.gBuildHashSkipTracking[self] = True
2483 return True
2484
2485 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
2486 GlobalData.gBuildHashSkipTracking[self] = True
2487 return True
2488
2489 return False
2490
2491 ## Decide whether we can skip the ModuleAutoGen process
2492 # If any source file is newer than the module than we cannot skip
2493 #
2494 def CanSkip(self):
2495 # Don't skip if cache feature enabled
2496 if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
2497 return False
2498 if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
2499 return True
2500 if not os.path.exists(self.TimeStampPath):
2501 return False
2502 #last creation time of the module
2503 DstTimeStamp = os.stat(self.TimeStampPath)[8]
2504
2505 SrcTimeStamp = self.Workspace._SrcTimeStamp
2506 if SrcTimeStamp > DstTimeStamp:
2507 return False
2508
2509 with open(self.TimeStampPath,'r') as f:
2510 for source in f:
2511 source = source.rstrip('\n')
2512 if not os.path.exists(source):
2513 return False
2514 if source not in ModuleAutoGen.TimeDict :
2515 ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
2516 if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
2517 return False
2518 GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
2519 return True
2520
2521 @cached_property
2522 def TimeStampPath(self):
2523 return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')