]> git.proxmox.com Git - mirror_edk2.git/blob - BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
BaseTools: Add GenFds multi-thread support in build cache
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / ModuleAutoGen.py
1 ## @file
2 # Create makefile for MS nmake and GNU make
3 #
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
6 #
7 from __future__ import absolute_import
8 from AutoGen.AutoGen import AutoGen
9 from Common.LongFilePathSupport import CopyLongFilePath
10 from Common.BuildToolError import *
11 from Common.DataType import *
12 from Common.Misc import *
13 from Common.StringUtils import NormPath,GetSplitList
14 from collections import defaultdict
15 from Workspace.WorkspaceCommon import OrderedListDict
16 import os.path as path
17 import copy
18 import hashlib
19 from . import InfSectionParser
20 from . import GenC
21 from . import GenMake
22 from . import GenDepex
23 from io import BytesIO
24 from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile
25 from Workspace.MetaFileCommentParser import UsageList
26 from .GenPcdDb import CreatePcdDatabaseCode
27 from Common.caching import cached_class_function
28 from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo
29 from AutoGen.CacheIR import ModuleBuildCacheIR
30 import json
31
32 ## Mapping Makefile type
33 gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
34 #
35 # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
36 # is the former use /I , the Latter used -I to specify include directories
37 #
38 gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
39 gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
40
41 ## default file name for AutoGen
42 gAutoGenCodeFileName = "AutoGen.c"
43 gAutoGenHeaderFileName = "AutoGen.h"
44 gAutoGenStringFileName = "%(module_name)sStrDefs.h"
45 gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
46 gAutoGenDepexFileName = "%(module_name)s.depex"
47 gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
48 gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
49 gInfSpecVersion = "0x00010017"
50
#
# Match name = variable
#
# Raw strings: "\s"/"\w" in a plain string literal are invalid escape
# sequences (a warning today, an error in future Python versions).
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
#
# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
#
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
60
61 #
62 # Template string to generic AsBuilt INF
63 #
64 gAsBuiltInfHeaderString = TemplateString("""${header_comments}
65
66 # DO NOT EDIT
67 # FILE auto-generated
68
69 [Defines]
70 INF_VERSION = ${module_inf_version}
71 BASE_NAME = ${module_name}
72 FILE_GUID = ${module_guid}
73 MODULE_TYPE = ${module_module_type}${BEGIN}
74 VERSION_STRING = ${module_version_string}${END}${BEGIN}
75 PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
76 UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
77 PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
78 ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
79 UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
80 CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
81 DESTRUCTOR = ${module_destructor}${END}${BEGIN}
82 SHADOW = ${module_shadow}${END}${BEGIN}
83 PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
84 PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
85 PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
86 PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
87 BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
88 SPEC = ${module_spec}${END}${BEGIN}
89 UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
90 MODULE_UNI_FILE = ${module_uni_file}${END}
91
92 [Packages.${module_arch}]${BEGIN}
93 ${package_item}${END}
94
95 [Binaries.${module_arch}]${BEGIN}
96 ${binary_item}${END}
97
98 [PatchPcd.${module_arch}]${BEGIN}
99 ${patchablepcd_item}
100 ${END}
101
102 [Protocols.${module_arch}]${BEGIN}
103 ${protocol_item}
104 ${END}
105
106 [Ppis.${module_arch}]${BEGIN}
107 ${ppi_item}
108 ${END}
109
110 [Guids.${module_arch}]${BEGIN}
111 ${guid_item}
112 ${END}
113
114 [PcdEx.${module_arch}]${BEGIN}
115 ${pcd_item}
116 ${END}
117
118 [LibraryClasses.${module_arch}]
119 ## @LIB_INSTANCES${BEGIN}
120 # ${libraryclasses_item}${END}
121
122 ${depexsection_item}
123
124 ${userextension_tianocore_item}
125
126 ${tail_comments}
127
128 [BuildOptions.${module_arch}]
129 ## @AsBuilt${BEGIN}
130 ## ${flags_item}${END}
131 """)
#
# extend lists contained in a dictionary with lists stored in another dictionary
# if CopyToDict is not derived from DefaultDict(list) then this may raise exception
#
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    # CopyToDict is expected to be a defaultdict(list): missing keys are
    # created on first access, then the source list is appended in order.
    for Key, ValueList in CopyFromDict.items():
        CopyToDict[Key] += ValueList
139
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    # Join the elements into one path, make sure it exists on disk, and hand
    # the caller the joined path.
    FullPath = path.join(*PathList)
    CreateDirectory(FullPath)
    return FullPath
145
146 #
147 # Convert string to C format array
148 #
149 def _ConvertStringToByteArray(Value):
150 Value = Value.strip()
151 if not Value:
152 return None
153 if Value[0] == '{':
154 if not Value.endswith('}'):
155 return None
156 Value = Value.replace(' ', '').replace('{', '').replace('}', '')
157 ValFields = Value.split(',')
158 try:
159 for Index in range(len(ValFields)):
160 ValFields[Index] = str(int(ValFields[Index], 0))
161 except ValueError:
162 return None
163 Value = '{' + ','.join(ValFields) + '}'
164 return Value
165
166 Unicode = False
167 if Value.startswith('L"'):
168 if not Value.endswith('"'):
169 return None
170 Value = Value[1:]
171 Unicode = True
172 elif not Value.startswith('"') or not Value.endswith('"'):
173 return None
174
175 Value = eval(Value) # translate escape character
176 NewValue = '{'
177 for Index in range(0, len(Value)):
178 if Unicode:
179 NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
180 else:
181 NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
182 Value = NewValue + '0}'
183 return Value
184
185 ## ModuleAutoGen class
186 #
187 # This class encapsules the AutoGen behaviors for the build tools. In addition to
188 # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
189 # to the [depex] section in module's inf file.
190 #
191 class ModuleAutoGen(AutoGen):
    # call super().__init__ then call the worker function with different parameter count
    def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # Instances can be re-initialized for an already-constructed object;
        # the _Init guard makes sure the heavy worker runs only once.
        if not hasattr(self, "_Init"):
            self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
            self._Init = True
197
    ## Cache the timestamps of metafiles of every module in a class attribute
    #
    TimeDict = {}

    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # args[0] is the platform meta-file and args[-1] the DataPipe — this
        # mirrors _InitWorker's (PlatformFile, DataPipe) tail parameters.
        # Creation is refused (None returned) when the active platform does
        # not employ this module at all.
        # check if this module is employed by active platform
        if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):
            EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
                              % (MetaFile, Arch))
            return None
        return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
209
    ## Initialize ModuleAutoGen
    #
    #   @param      Workspace           EdkIIWorkspaceBuild object
    #   @param      ModuleFile          The path of module file
    #   @param      Target              Build target (DEBUG, RELEASE)
    #   @param      Toolchain           Name of tool chain
    #   @param      Arch                The arch the module supports
    #   @param      PlatformFile        Platform meta-file
    #   @param      DataPipe            Cross-process carrier of platform context
    #
    def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile,DataPipe):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)

        self.Workspace = Workspace
        self.WorkspaceDir = ""
        self.PlatformInfo = None
        self.DataPipe = DataPipe
        # Fills in self.WorkspaceDir and self.PlatformInfo from the data pipe;
        # must run before SourceDir is made workspace-relative below.
        self.__init_platform_info__()
        self.MetaFile = ModuleFile
        self.SourceDir = self.MetaFile.SubDir
        self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)

        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily

        # Generation-state flags for this module.
        self.IsCodeFileCreated = False
        self.IsAsBuiltInfCreated = False
        self.DepexGenerated = False

        self.BuildDatabase = self.Workspace.BuildDatabase
        self.BuildRuleOrder = None
        self.BuildTime = 0

        self._GuidComments = OrderedListDict()
        self._ProtocolComments = OrderedListDict()
        self._PpiComments = OrderedListDict()
        # Lazily populated by _ApplyBuildRule() via the Targets property.
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None

        self.AutoGenDepSet = set()
        self.ReferenceModules = []
        self.ConstPcd = {}
        self.Makefile = None
        self.FileDependCache = {}
259
260 def __init_platform_info__(self):
261 pinfo = self.DataPipe.Get("P_Info")
262 self.WorkspaceDir = pinfo.get("WorkspaceDir")
263 self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)
    ## hash() operator of ModuleAutoGen
    #
    #  The module file path and arch string will be used to represent
    #  hash value of this object
    #
    #   @retval   int Hash value of the module file path and arch
    #
    @cached_class_function
    def __hash__(self):
        # (MetaFile, Arch) uniquely identifies a module build; the decorator
        # caches the result so the tuple hash is computed once per instance.
        return hash((self.MetaFile, self.Arch))
274 def __repr__(self):
275 return "%s [%s]" % (self.MetaFile, self.Arch)
276
277 # Get FixedAtBuild Pcds of this Module
278 @cached_property
279 def FixedAtBuildPcds(self):
280 RetVal = []
281 for Pcd in self.ModulePcdList:
282 if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:
283 continue
284 if Pcd not in RetVal:
285 RetVal.append(Pcd)
286 return RetVal
287
288 @cached_property
289 def FixedVoidTypePcds(self):
290 RetVal = {}
291 for Pcd in self.FixedAtBuildPcds:
292 if Pcd.DatumType == TAB_VOID:
293 if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:
294 RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue
295 return RetVal
296
297 @property
298 def UniqueBaseName(self):
299 ModuleNames = self.DataPipe.Get("M_Name")
300 if not ModuleNames:
301 return self.Name
302 return ModuleNames.get((self.Name,self.MetaFile),self.Name)
303
    # Macros could be used in build_rule.txt (also Makefile)
    # NOTE: several keys are deliberate aliases (TOOLCHAIN/TOOLCHAIN_TAG/
    # TOOL_CHAIN_TAG, MODULE_DIR/MODULE_RELATIVE_DIR, BIN_DIR/LIB_DIR) so
    # rules written against either spelling resolve to the same value.
    @cached_property
    def Macros(self):
        return OrderedDict((
            ("WORKSPACE" ,self.WorkspaceDir),
            ("MODULE_NAME" ,self.Name),
            ("MODULE_NAME_GUID" ,self.UniqueBaseName),
            ("MODULE_GUID" ,self.Guid),
            ("MODULE_VERSION" ,self.Version),
            ("MODULE_TYPE" ,self.ModuleType),
            ("MODULE_FILE" ,str(self.MetaFile)),
            ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),
            ("MODULE_RELATIVE_DIR" ,self.SourceDir),
            ("MODULE_DIR" ,self.SourceDir),
            ("BASE_NAME" ,self.Name),
            ("ARCH" ,self.Arch),
            ("TOOLCHAIN" ,self.ToolChain),
            ("TOOLCHAIN_TAG" ,self.ToolChain),
            ("TOOL_CHAIN_TAG" ,self.ToolChain),
            ("TARGET" ,self.BuildTarget),
            ("BUILD_DIR" ,self.PlatformInfo.BuildDir),
            ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("MODULE_BUILD_DIR" ,self.BuildDir),
            ("OUTPUT_DIR" ,self.OutputDir),
            ("DEBUG_DIR" ,self.DebugDir),
            ("DEST_DIR_OUTPUT" ,self.OutputDir),
            ("DEST_DIR_DEBUG" ,self.DebugDir),
            ("PLATFORM_NAME" ,self.PlatformInfo.Name),
            ("PLATFORM_GUID" ,self.PlatformInfo.Guid),
            ("PLATFORM_VERSION" ,self.PlatformInfo.Version),
            ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),
            ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
            ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),
            ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)
            ))
340
    # --- Thin cached accessors over the module build-database object ---

    ## Return the module build data object
    @cached_property
    def Module(self):
        return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]

    ## Return the module name
    @cached_property
    def Name(self):
        return self.Module.BaseName

    ## Return the module DxsFile if exist
    @cached_property
    def DxsFile(self):
        return self.Module.DxsFile

    ## Return the module meta-file GUID
    @cached_property
    def Guid(self):
        #
        # To build same module more than once, the module path with FILE_GUID overridden has
        # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
        # in DSC. The overridden GUID can be retrieved from file name
        #
        if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
            #
            # Length of GUID is 36
            #
            return os.path.basename(self.MetaFile.Path)[:36]
        return self.Module.Guid

    ## Return the module version
    @cached_property
    def Version(self):
        return self.Module.Version

    ## Return the module type
    @cached_property
    def ModuleType(self):
        return self.Module.ModuleType

    ## Return the component type (for Edk.x style of module)
    @cached_property
    def ComponentType(self):
        return self.Module.ComponentType

    ## Return the build type
    @cached_property
    def BuildType(self):
        return self.Module.BuildType

    ## Return the PCD_IS_DRIVER setting
    @cached_property
    def PcdIsDriver(self):
        return self.Module.PcdIsDriver

    ## Return the autogen version, i.e. module meta-file version
    @cached_property
    def AutoGenVersion(self):
        return self.Module.AutoGenVersion

    ## Check if the module is library or not
    @cached_property
    def IsLibrary(self):
        return bool(self.Module.LibraryClass)

    ## Check if the module is binary module or not
    @cached_property
    def IsBinaryModule(self):
        return self.Module.IsBinaryModule
410
    ## Return the directory to store intermediate files of the module
    @cached_property
    def BuildDir(self):
        # $(BUILD_DIR)/$(ARCH)/<module relative dir>/<INF base name>;
        # _MakeDir creates the directory on first access.
        return _MakeDir((
            self.PlatformInfo.BuildDir,
            self.Arch,
            self.SourceDir,
            self.MetaFile.BaseName
            ))

    ## Return the directory to store the intermediate object files of the module
    @cached_property
    def OutputDir(self):
        return _MakeDir((self.BuildDir, "OUTPUT"))

    ## Return the directory path to store ffs file
    @cached_property
    def FfsOutputDir(self):
        # Only meaningful when an FDF file is being processed; empty string
        # otherwise.
        if GlobalData.gFdfParser:
            return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
        return ''

    ## Return the directory to store auto-gened source files of the module
    @cached_property
    def DebugDir(self):
        return _MakeDir((self.BuildDir, "DEBUG"))

    ## Return the path of custom makefile
    @cached_property
    def CustomMakefile(self):
        # Map "nmake"/"gmake" -> makefile path relative to the source dir;
        # unknown makefile types fall back to 'nmake'.
        RetVal = {}
        for Type in self.Module.CustomMakefile:
            MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'
            File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
            RetVal[MakeType] = File
        return RetVal

    ## Return the directory of the makefile
    #
    #   @retval     string  The directory string of module's makefile
    #
    @cached_property
    def MakeFileDir(self):
        return self.BuildDir

    ## Return build command string
    #
    #   @retval     string  Build command string
    #
    @cached_property
    def BuildCommand(self):
        return self.PlatformInfo.BuildCommand
463
464 ## Get object list of all packages the module and its dependent libraries belong to
465 #
466 # @retval list The list of package object
467 #
468 @cached_property
469 def DerivedPackageList(self):
470 PackageList = []
471 for M in [self.Module] + self.DependentLibraryList:
472 for Package in M.Packages:
473 if Package in PackageList:
474 continue
475 PackageList.append(Package)
476 return PackageList
477
    ## Get the depex string
    #
    # Builds the [Depex] section content for the As-Built INF: either a set
    # of per-(arch, module-type) sections (USER_DEFINED / HOST_APPLICATION)
    # or a single combined expression AND-ed across module and libraries.
    #
    # @return : a string contain all depex expression.
    def _GetDepexExpresionString(self):
        DepexStr = ''
        DepexList = []
        ## DPX_SOURCE IN Define section.
        if self.Module.DxsFile:
            return DepexStr
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            DepexExpressionList = InfObj.GetDepexExpresionList()
            for DepexExpression in DepexExpressionList:
                for key in DepexExpression:
                    Arch, ModuleType = key
                    # Comment lines (leading '#') inside the depex are dropped.
                    DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
                    # the type of build module is USER_DEFINED.
                    # All different DEPEX section tags would be copied into the As Built INF file
                    # and there would be separate DEPEX section tags
                    if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
                        if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
                    else:
                        if Arch.upper() == TAB_ARCH_COMMON or \
                           (Arch.upper() == self.Arch.upper() and \
                            ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
                            DepexList.append({(Arch, ModuleType): DepexExpr})

        #the type of build module is USER_DEFINED.
        if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
            for Depex in DepexList:
                for key in Depex:
                    DepexStr += '[Depex.%s.%s]\n' % key
                    DepexStr += '\n'.join('# '+ val for val in Depex[key])
                    DepexStr += '\n\n'
            if not DepexStr:
                return '[Depex.%s]\n' % self.Arch
            return DepexStr

        #the type of build module not is USER_DEFINED.
        Count = 0
        for Depex in DepexList:
            Count += 1
            if DepexStr != '':
                DepexStr += ' AND '
            DepexStr += '('
            for D in Depex.values():
                DepexStr += ' '.join(val for val in D)
            # Strip a trailing 'END' token before closing the parenthesis.
            Index = DepexStr.find('END')
            if Index > -1 and Index == len(DepexStr) - 3:
                DepexStr = DepexStr[:-3]
            DepexStr = DepexStr.strip()
            DepexStr += ')'
        if Count == 1:
            # A single expression needs no wrapping parentheses.
            DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
        if not DepexStr:
            return '[Depex.%s]\n' % self.Arch
        return DepexStr
537
    ## Merge dependency expression
    #
    # Combines the depex token lists of the module and all its library
    # instances into one AND-ed token list, substituting 16-byte VOID*
    # FixedAtBuild PCD values for GUID-style "Space.Token" references.
    #
    #   @retval     dict    {module type: token list of the merged expression}
    #
    @cached_property
    def DepexList(self):
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexList = []
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
        #
        FixedVoidTypePcds = {}
        for M in [self] + self.LibraryAutoGenList:
            FixedVoidTypePcds.update(M.FixedVoidTypePcds)
        for M in [self] + self.LibraryAutoGenList:
            Inherited = False
            for D in M.Module.Depex[self.Arch, self.ModuleType]:
                if DepexList != []:
                    DepexList.append('AND')
                DepexList.append('(')
                #replace D with value if D is FixedAtBuild PCD
                NewList = []
                for item in D:
                    if '.' not in item:
                        NewList.append(item)
                    else:
                        # "Guid.Token" reference: must resolve to a 16-byte
                        # (GUID-sized) VOID* FixedAtBuild PCD value.
                        try:
                            Value = FixedVoidTypePcds[item]
                            if len(Value.split(',')) != 16:
                                EdkLogger.error("build", FORMAT_INVALID,
                                                "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
                            NewList.append(Value)
                        except:
                            EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))

                DepexList.extend(NewList)
                if DepexList[-1] == 'END':  # no need of a END at this time
                    DepexList.pop()
                DepexList.append(')')
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
            # BEFORE/AFTER expressions stand alone; stop merging once seen.
            if 'BEFORE' in DepexList or 'AFTER' in DepexList:
                break
        if len(DepexList) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexList}
587
    ## Merge dependency expression
    #
    # Same merge as DepexList, but producing a single human-readable
    # expression string rather than a token list.
    #
    #   @retval     dict    {module type: merged depex expression string}
    #
    @cached_property
    def DepexExpressionDict(self):
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexExpressionString = ''
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.DepexExpression[self.Arch, self.ModuleType]:
                if DepexExpressionString != '':
                    DepexExpressionString += ' AND '
                DepexExpressionString += '('
                DepexExpressionString += D
                # Drop a trailing 'END' keyword before closing the group.
                DepexExpressionString = DepexExpressionString.rstrip('END').strip()
                DepexExpressionString += ')'
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
            # BEFORE/AFTER expressions stand alone; stop merging once seen.
            if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
                break
        if len(DepexExpressionString) > 0:
            EdkLogger.verbose('')

        return {self.ModuleType:DepexExpressionString}
619
    # Get the tiano core user extension, it is contain dependent library.
    # @retval: a list contain tiano core userextension.
    #
    def _GetTianoCoreUserExtensionList(self):
        # Collect [UserExtensions.TianoCore...] sections from this module and
        # every dependent library whose arch qualifier (4th section field,
        # when present) matches this build arch or COMMON.
        TianoCoreUserExtentionList = []
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
            for TianoCoreUserExtent in TianoCoreUserExtenList:
                for Section in TianoCoreUserExtent:
                    ItemList = Section.split(TAB_SPLIT)
                    Arch = self.Arch
                    if len(ItemList) == 4:
                        Arch = ItemList[3]
                    if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
                        # Each entry carries its own "[Section]" header line
                        # followed by the section body and a blank line.
                        TianoCoreList = []
                        TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
                        TianoCoreList.extend(TianoCoreUserExtent[Section][:])
                        TianoCoreList.append('\n')
                        TianoCoreUserExtentionList.append(TianoCoreList)

        return TianoCoreUserExtentionList
643
    ## Return the list of specification version required for the module
    #
    #   @retval     list    The list of specification defined in module file
    #
    @cached_property
    def Specification(self):
        return self.Module.Specification
651
    ## Tool option for the module build
    #
    #   @param      PlatformInfo    The object of PlatformBuildInfo
    #   @retval     dict            The dict containing valid options
    #
    @cached_property
    def BuildOption(self):
        # Side effect: also sets self.BuildRuleOrder, converting the
        # space-separated extension list into ['.ext', ...] form used to pick
        # among multiple buildable files with the same base name.
        RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
        if self.BuildRuleOrder:
            self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
        return RetVal
663
664 ## Get include path list from tool option for the module build
665 #
666 # @retval list The include path list
667 #
668 @cached_property
669 def BuildOptionIncPathList(self):
670 #
671 # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
672 # is the former use /I , the Latter used -I to specify include directories
673 #
674 if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):
675 BuildOptIncludeRegEx = gBuildOptIncludePatternMsft
676 elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
677 BuildOptIncludeRegEx = gBuildOptIncludePatternOther
678 else:
679 #
680 # New ToolChainFamily, don't known whether there is option to specify include directories
681 #
682 return []
683
684 RetVal = []
685 for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
686 try:
687 FlagOption = self.BuildOption[Tool]['FLAGS']
688 except KeyError:
689 FlagOption = ''
690
691 if self.ToolChainFamily != 'RVCT':
692 IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
693 else:
694 #
695 # RVCT may specify a list of directory seperated by commas
696 #
697 IncPathList = []
698 for Path in BuildOptIncludeRegEx.findall(FlagOption):
699 PathList = GetSplitList(Path, TAB_COMMA_SPLIT)
700 IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)
701
702 #
703 # EDK II modules must not reference header files outside of the packages they depend on or
704 # within the module's directory tree. Report error if violation.
705 #
706 if GlobalData.gDisableIncludePathCheck == False:
707 for Path in IncPathList:
708 if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
709 ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
710 EdkLogger.error("build",
711 PARAMETER_INVALID,
712 ExtraData=ErrMsg,
713 File=str(self.MetaFile))
714 RetVal += IncPathList
715 return RetVal
716
    ## Return a list of files which can be built from source
    #
    #  What kind of files can be built is determined by build rules in
    #  $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
    #
    @cached_property
    def SourceFileList(self):
        RetVal = []
        # A file qualifies when its tool-chain tag and family are either
        # unset, wildcard, or match the active tool chain / rule family.
        ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
        ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
        for F in self.Module.Sources:
            # match tool chain
            if F.TagName not in ToolChainTagSet:
                EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
                continue
            # match tool chain family or build rule family
            if F.ToolChainFamily not in ToolChainFamilySet:
                EdkLogger.debug(
                        EdkLogger.DEBUG_0,
                        "The file [%s] must be built by tools of [%s], " \
                        "but current toolchain family is [%s], buildrule family is [%s]" \
                            % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
                continue

            # add the file path into search path list for file including
            if F.Dir not in self.IncludePathList:
                self.IncludePathList.insert(0, F.Dir)
            RetVal.append(F)

        # Drop lower-priority duplicates per BuildRuleOrder (in place).
        self._MatchBuildRuleOrder(RetVal)

        for F in RetVal:
            self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return RetVal
752
    def _MatchBuildRuleOrder(self, FileList):
        # When several source files share a base name and their extensions
        # are all listed in BuildRuleOrder, keep only the highest-priority
        # extension and remove the rest from FileList (modified in place).
        Order_Dict = {}
        # Accessing BuildOption (for its side effect) populates
        # self.BuildRuleOrder before it is consulted below.
        self.BuildOption
        for SingleFile in FileList:
            if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
                key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]
                if key in Order_Dict:
                    Order_Dict[key].append(SingleFile.Ext)
                else:
                    Order_Dict[key] = [SingleFile.Ext]

        RemoveList = []
        for F in Order_Dict:
            if len(Order_Dict[F]) > 1:
                Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
                for Ext in Order_Dict[F][1:]:
                    RemoveList.append(F + Ext)

        for item in RemoveList:
            # NOTE(review): 'item' is a plain path string while FileList holds
            # file objects — this relies on the file objects comparing equal
            # to their path string; confirm against the path class's __eq__.
            FileList.remove(item)

        return FileList
775
    # NOTE: the three accessors below read self.FileTypes, whose first access
    # triggers build-rule application for all source/binary files.

    ## Return the list of unicode files
    @cached_property
    def UnicodeFileList(self):
        return self.FileTypes.get(TAB_UNICODE_FILE,[])

    ## Return the list of vfr files
    @cached_property
    def VfrFileList(self):
        return self.FileTypes.get(TAB_VFR_FILE, [])

    ## Return the list of Image Definition files
    @cached_property
    def IdfFileList(self):
        return self.FileTypes.get(TAB_IMAGE_FILE,[])
790
    ## Return a list of files which can be built from binary
    #
    #  "Build" binary files are just to copy them to build directory.
    #
    #   @retval     list    The list of files which can be built later
    #
    @cached_property
    def BinaryFileList(self):
        RetVal = []
        for F in self.Module.Binaries:
            # Keep binaries targeted at COMMON/* or the current build target.
            if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:
                continue
            RetVal.append(F)
            # RetVal is passed through so the rule application sees the
            # binary list currently under construction.
            self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
        return RetVal
806
    ## Build-rule table for this module: file type or extension -> rule object.
    #
    # Lookup order per file type: (BuildType, BuildRuleFamily), then
    # (ModuleType, BuildRuleFamily), then (BuildType, ToolChainFamily), then
    # (ModuleType, ToolChainFamily).  Types with no matching rule are omitted.
    @cached_property
    def BuildRules(self):
        RetVal = {}
        BuildRuleDatabase = self.PlatformInfo.BuildRule
        for Type in BuildRuleDatabase.FileTypeList:
            #first try getting build rule by BuildRuleFamily
            RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
            if not RuleObject:
                # build type is always module type, but ...
                if self.ModuleType != self.BuildType:
                    RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
            #second try getting build rule by ToolChainFamily
            if not RuleObject:
                RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
                if not RuleObject:
                    # build type is always module type, but ...
                    if self.ModuleType != self.BuildType:
                        RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
            if not RuleObject:
                continue
            RuleObject = RuleObject.Instantiate(self.Macros)
            RetVal[Type] = RuleObject
            # Register the same rule under each of its source extensions so
            # files can be matched by extension as well as by type.
            for Ext in RuleObject.SourceFileExtList:
                RetVal[Ext] = RuleObject
        return RetVal
832
    def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
        # Walk the build-rule chain starting from File: apply the matching
        # rule, feed each rule's outputs back in as new sources, and record
        # intro/intermediate/final build targets on self._BuildTargets,
        # self._IntroBuildTargetList, self._FinalBuildTargetList and
        # self._FileTypes.  Statement order below is load-bearing (FileType
        # is rebound inside the loop), so the flow is documented rather than
        # restructured.
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        if not BinaryFileList:
            BinaryFileList = self.BinaryFileList

        SubDirectory = os.path.join(self.OutputDir, File.SubDir)
        if not os.path.exists(SubDirectory):
            CreateDirectory(SubDirectory)
        LastTarget = None
        RuleChain = set()
        SourceList = [File]
        Index = 0
        #
        # Make sure to get build rule order value
        #
        self.BuildOption

        while Index < len(SourceList):
            # Worklist loop: SourceList grows as rules emit outputs.
            Source = SourceList[Index]
            Index = Index + 1

            if Source != File:
                CreateDirectory(Source.Dir)

            if File.IsBinary and File == Source and File in BinaryFileList:
                # Skip all files that are not binary libraries
                if not self.IsLibrary:
                    continue
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            elif FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            else:
                # stop at no more rules
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            FileType = RuleObject.SourceFileType
            self._FileTypes[FileType].add(Source)

            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            Target = RuleObject.Apply(Source, self.BuildRuleOrder)
            if not Target:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break
            elif not Target.Outputs:
                # Only do build for target with outputs
                self._FinalBuildTargetList.add(Target)

            self._BuildTargets[FileType].add(Target)

            if not Source.IsBinary and Source == File:
                # First-stage target produced directly from the original file.
                self._IntroBuildTargetList.add(Target)

            # to avoid cyclic rule
            if FileType in RuleChain:
                break

            RuleChain.add(FileType)
            SourceList.extend(Target.Outputs)
            LastTarget = Target
            FileType = TAB_UNKNOWN_FILE
908
    ## Build-target table: file type -> set of build targets.
    #
    # First access applies the build rules to every source and binary file
    # (via the SourceFileList/BinaryFileList property side effects), which
    # populates all four _BuildTargets/_Intro/_Final/_FileTypes containers.
    @cached_property
    def Targets(self):
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        #TRICK: call SourceFileList property to apply build rule for source files
        self.SourceFileList

        #TRICK: call _GetBinaryFileList to apply build rule for binary files
        self.BinaryFileList

        return self._BuildTargets

    ## First-stage targets (built directly from original source files).
    @cached_property
    def IntroTargetList(self):
        self.Targets
        return self._IntroBuildTargetList

    ## Final-stage targets (ends of the build-rule chains).
    @cached_property
    def CodaTargetList(self):
        self.Targets
        return self._FinalBuildTargetList

    ## File type -> set of source files, as classified by the build rules.
    @cached_property
    def FileTypes(self):
        self.Targets
        return self._FileTypes

    ## Get the list of package object the module depends on
    #
    #   @retval     list    The package object list
    #
    @cached_property
    def DependentPackageList(self):
        return self.Module.Packages
947
    ## Return the collection of auto-generated files
    #
    # @retval dict  Mapping of generated PathClass to its content (text for
    #               .c/.h/.idf files, bytes for string/IDF binary packs) --
    #               despite the name, this is a dict, not a list
    #
    @cached_property
    def AutoGenFileList(self):
        # UNI/IDF content is generated here unless the module is a
        # 'UEFI_HII' build type (then it is handled differently by GenC).
        AutoGenUniIdf = self.BuildType != 'UEFI_HII'
        UniStringBinBuffer = BytesIO()
        IdfGenBinBuffer = BytesIO()
        RetVal = {}
        AutoGenC = TemplateString()
        AutoGenH = TemplateString()
        StringH = TemplateString()
        StringIdf = TemplateString()
        # GenC.CreateCode fills all of the buffers above in a single pass.
        GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
        #
        # AutoGen.c is generated if there are library classes in inf, or there are object files
        #
        if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
                                    or TAB_OBJECT_FILE in self.FileTypes):
            AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenC)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(AutoGenH) != "":
            AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(StringH) != "":
            AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        # Binary string pack goes to the output dir (not the debug dir) and
        # is flagged IsBinary so later copies do not re-encode it.
        if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = UniStringBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if UniStringBinBuffer is not None:
            UniStringBinBuffer.close()
        if str(StringIdf) != "":
            AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringIdf)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if IdfGenBinBuffer is not None:
            IdfGenBinBuffer.close()
        return RetVal
998
999 ## Return the list of library modules explicitly or implicitly used by this module
1000 @cached_property
1001 def DependentLibraryList(self):
1002 # only merge library classes and PCD for non-library module
1003 if self.IsLibrary:
1004 return []
1005 return self.PlatformInfo.ApplyLibraryInstance(self.Module)
1006
1007 ## Get the list of PCDs from current module
1008 #
1009 # @retval list The list of PCD
1010 #
1011 @cached_property
1012 def ModulePcdList(self):
1013 # apply PCD settings from platform
1014 RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
1015
1016 return RetVal
1017 @cached_property
1018 def _PcdComments(self):
1019 ReVal = OrderedListDict()
1020 ExtendCopyDictionaryLists(ReVal, self.Module.PcdComments)
1021 if not self.IsLibrary:
1022 for Library in self.DependentLibraryList:
1023 ExtendCopyDictionaryLists(ReVal, Library.PcdComments)
1024 return ReVal
1025
1026 ## Get the list of PCDs from dependent libraries
1027 #
1028 # @retval list The list of PCD
1029 #
1030 @cached_property
1031 def LibraryPcdList(self):
1032 if self.IsLibrary:
1033 return []
1034 RetVal = []
1035 Pcds = set()
1036 # get PCDs from dependent libraries
1037 for Library in self.DependentLibraryList:
1038 PcdsInLibrary = OrderedDict()
1039 for Key in Library.Pcds:
1040 # skip duplicated PCDs
1041 if Key in self.Module.Pcds or Key in Pcds:
1042 continue
1043 Pcds.add(Key)
1044 PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
1045 RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))
1046 return RetVal
1047
1048 ## Get the GUID value mapping
1049 #
1050 # @retval dict The mapping between GUID cname and its value
1051 #
1052 @cached_property
1053 def GuidList(self):
1054 RetVal = self.Module.Guids
1055 for Library in self.DependentLibraryList:
1056 RetVal.update(Library.Guids)
1057 ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)
1058 ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)
1059 return RetVal
1060
1061 @cached_property
1062 def GetGuidsUsedByPcd(self):
1063 RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())
1064 for Library in self.DependentLibraryList:
1065 RetVal.update(Library.GetGuidsUsedByPcd())
1066 return RetVal
1067 ## Get the protocol value mapping
1068 #
1069 # @retval dict The mapping between protocol cname and its value
1070 #
1071 @cached_property
1072 def ProtocolList(self):
1073 RetVal = OrderedDict(self.Module.Protocols)
1074 for Library in self.DependentLibraryList:
1075 RetVal.update(Library.Protocols)
1076 ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)
1077 ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)
1078 return RetVal
1079
1080 ## Get the PPI value mapping
1081 #
1082 # @retval dict The mapping between PPI cname and its value
1083 #
1084 @cached_property
1085 def PpiList(self):
1086 RetVal = OrderedDict(self.Module.Ppis)
1087 for Library in self.DependentLibraryList:
1088 RetVal.update(Library.Ppis)
1089 ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)
1090 ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)
1091 return RetVal
1092
1093 ## Get the list of include search path
1094 #
1095 # @retval list The list path
1096 #
1097 @cached_property
1098 def IncludePathList(self):
1099 RetVal = []
1100 RetVal.append(self.MetaFile.Dir)
1101 RetVal.append(self.DebugDir)
1102
1103 for Package in self.Module.Packages:
1104 PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
1105 if PackageDir not in RetVal:
1106 RetVal.append(PackageDir)
1107 IncludesList = Package.Includes
1108 if Package._PrivateIncludes:
1109 if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
1110 IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
1111 for Inc in IncludesList:
1112 if Inc not in RetVal:
1113 RetVal.append(str(Inc))
1114 return RetVal
1115
1116 @cached_property
1117 def IncludePathLength(self):
1118 return sum(len(inc)+1 for inc in self.IncludePathList)
1119
    ## Get the list of include paths from the packages
    #
    # @IncludesList list The list path
    #
    @cached_property
    def PackageIncludePathList(self):
        IncludesList = []
        for Package in self.Module.Packages:
            PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
            # NOTE(review): IncludesList is reassigned on every iteration,
            # so only the include paths of the LAST package in the list are
            # returned -- confirm whether accumulation was intended here.
            IncludesList = Package.Includes
            if Package._PrivateIncludes:
                # Private includes are dropped when this module's INF does
                # not live inside the package directory.
                if not self.MetaFile.Path.startswith(PackageDir):
                    IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
        return IncludesList
1134
    ## Get HII EX PCDs which maybe used by VFR
    #
    #  efivarstore used by VFR may relate with HII EX PCDs
    #  Get the variable name and GUID from efivarstore and HII EX PCD
    #  List the HII EX PCDs in As Built INF if both name and GUID match.
    #
    #  @retval list HII EX PCDs
    #
    def _GetPcdsMaybeUsedByVfr(self):
        if not self.SourceFileList:
            return []

        # Collect (variable name, GUID) pairs from every preprocessed VFR
        # (.i) file produced by this module's build.
        NameGuids = set()
        for SrcFile in self.SourceFileList:
            if SrcFile.Ext.lower() != '.vfr':
                continue
            Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
            if not os.path.exists(Vfri):
                continue
            VfriFile = open(Vfri, 'r')
            Content = VfriFile.read()
            VfriFile.close()
            Pos = Content.find('efivarstore')
            while Pos != -1:
                #
                # Make sure 'efivarstore' is the start of efivarstore statement
                # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
                #
                Index = Pos - 1
                # Walk back over whitespace; a real statement must follow a ';'
                # (or start of file).
                while Index >= 0 and Content[Index] in ' \t\r\n':
                    Index -= 1
                if Index >= 0 and Content[Index] != ';':
                    Pos = Content.find('efivarstore', Pos + len('efivarstore'))
                    continue
                #
                # 'efivarstore' must be followed by name and guid
                #
                Name = gEfiVarStoreNamePattern.search(Content, Pos)
                if not Name:
                    break
                Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
                if not Guid:
                    break
                NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')
                NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
                Pos = Content.find('efivarstore', Name.end())
        if not NameGuids:
            return []
        # Match the collected pairs against the platform's HII EX PCDs; a
        # PCD is reported once even if several SKUs match.
        HiiExPcds = []
        for Pcd in self.PlatformInfo.Pcds.values():
            if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
                continue
            for SkuInfo in Pcd.SkuInfoList.values():
                Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
                if not Value:
                    continue
                Name = _ConvertStringToByteArray(SkuInfo.VariableName)
                Guid = GuidStructureStringToGuidString(Value)
                if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
                    HiiExPcds.append(Pcd)
                    break

        return HiiExPcds
1198
    ## Generate the offset-record binary (<Name>Offset.bin)
    #
    #  Looks up the in-image offsets of VFR binaries and UNI string packs in
    #  the module's .map/.efi files and writes one GUID + 64-bit offset
    #  record per entry into <Name>Offset.bin in the output directory.
    #
    #  @retval str   The output file name on success
    #  @retval None  If the module has no VFR/UNI sources or no offsets found
    #
    def _GenOffsetBin(self):
        VfrUniBaseName = {}
        for SourceFile in self.Module.Sources:
            if SourceFile.Type.upper() == ".VFR" :
                #
                # search the .map file to find the offset of vfr binary in the PE32+/TE file.
                #
                VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
            elif SourceFile.Type.upper() == ".UNI" :
                #
                # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
                #
                VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")

        if not VfrUniBaseName:
            return None
        MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
        EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
        VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
        if not VfrUniOffsetList:
            return None

        OutputName = '%sOffset.bin' % self.Name
        UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)

        try:
            # Unbuffered binary read/write handle for the offset records.
            fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
        except:
            EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)

        # Use a instance of BytesIO to cache data
        fStringIO = BytesIO()

        for Item in VfrUniOffsetList:
            if (Item[0].find("Strings") != -1):
                #
                # UNI offset in image.
                # GUID + Offset
                # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
                #
                UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
                fStringIO.write(UniGuid)
                UniValue = pack ('Q', int (Item[1], 16))
                fStringIO.write (UniValue)
            else:
                #
                # VFR binary offset in image.
                # GUID + Offset
                # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
                #
                VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
                fStringIO.write(VfrGuid)
                VfrValue = pack ('Q', int (Item[1], 16))
                fStringIO.write (VfrValue)
        #
        # write data into file.
        #
        try :
            fInputfile.write (fStringIO.getvalue())
        except:
            # NOTE(review): EdkLogger.error presumably raises here, which
            # would leave fInputfile open on this path -- confirm, and
            # consider a try/finally or 'with' for cleanup.
            EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
                            "file been locked or using by other applications." %UniVfrOffsetFileName, None)

        fStringIO.close ()
        fInputfile.close ()
        return OutputName
1265
    ## Set of files produced by this module's build
    #
    #  Collects, as names relative to the output/FFS directories: the coda
    #  targets, the .depex file, the offset bin, any .pdb files, and the
    #  FFS artifacts (.ffs/.offset/.raw/.raw.txt).
    #
    #  @retval set  Relative file names of the build products
    #
    @cached_property
    def OutputFile(self):
        retVal = set()
        OutputDir = self.OutputDir.replace('\\', '/').strip('/')
        DebugDir = self.DebugDir.replace('\\', '/').strip('/')
        FfsOutputDir = self.FfsOutputDir.replace('\\', '/').rstrip('/')
        for Item in self.CodaTargetList:
            # Strip the debug/output dir prefixes to get a relative name.
            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
            retVal.add(File)
        if self.DepexGenerated:
            retVal.add(self.Name + '.depex')

        Bin = self._GenOffsetBin()
        if Bin:
            retVal.add(Bin)

        for Root, Dirs, Files in os.walk(OutputDir):
            for File in Files:
                if File.lower().endswith('.pdb'):
                    retVal.add(File)

        for Root, Dirs, Files in os.walk(FfsOutputDir):
            for File in Files:
                if File.lower().endswith('.ffs') or File.lower().endswith('.offset') or File.lower().endswith('.raw') \
                    or File.lower().endswith('.raw.txt'):
                    retVal.add(File)

        return retVal
1294
    ## Create AsBuilt INF file the module
    #
    #  Writes <Name>.inf to the output directory describing the binaries the
    #  build actually produced (PE32, depex, patchable-PCD offsets, used
    #  protocols/PPIs/GUIDs/packages) so the module can later be consumed as
    #  a binary-only module.  Only runs once, and only for non-library,
    #  source-based modules.
    #
    def CreateAsBuiltInf(self):

        if self.IsAsBuiltInfCreated:
            return

        # Skip INF file generation for libraries
        if self.IsLibrary:
            return

        # Skip the following code for modules with no source files
        if not self.SourceFileList:
            return

        # Skip the following code for modules without any binary files
        if self.BinaryFileList:
            return

        ### TODO: How to handles mixed source and binary modules

        # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
        # Also find all packages that the DynamicEx PCDs depend on
        Pcds = []
        PatchablePcds = []
        Packages = []
        PcdCheckList = []
        PcdTokenSpaceList = []
        for Pcd in self.ModulePcdList + self.LibraryPcdList:
            if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
                PatchablePcds.append(Pcd)
                PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
            elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
                if Pcd not in Pcds:
                    Pcds.append(Pcd)
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
                    PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
        GuidList = OrderedDict(self.GuidList)
        for TokenSpace in self.GetGuidsUsedByPcd:
            # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
            # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
            if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
                GuidList.pop(TokenSpace)
        # Keep only the packages that declare at least one of the GUIDs,
        # PPIs, protocols or PCDs this module actually uses.
        CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
        for Package in self.DerivedPackageList:
            if Package in Packages:
                continue
            BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
            Found = False
            for Index in range(len(BeChecked)):
                for Item in CheckList[Index]:
                    if Item in BeChecked[Index]:
                        Packages.append(Package)
                        Found = True
                        break
                if Found:
                    break

        # Packages declaring HII EX PCDs referenced from VFR must be listed too.
        VfrPcds = self._GetPcdsMaybeUsedByVfr()
        for Pkg in self.PlatformInfo.PackageList:
            if Pkg in Packages:
                continue
            for VfrPcd in VfrPcds:
                if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
                    (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
                    Packages.append(Pkg)
                    break

        # A UEFI_DRIVER with a generated depex is reported as DXE_DRIVER.
        ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
        DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
        Guid = self.Guid
        MDefs = self.Module.Defines

        # Template substitution dictionary for gAsBuiltInfHeaderString.
        AsBuiltInfDict = {
            'module_name'                       : self.Name,
            'module_guid'                       : Guid,
            'module_module_type'                : ModuleType,
            'module_version_string'             : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
            'pcd_is_driver_string'              : [],
            'module_uefi_specification_version' : [],
            'module_pi_specification_version'   : [],
            'module_entry_point'                : self.Module.ModuleEntryPointList,
            'module_unload_image'               : self.Module.ModuleUnloadImageList,
            'module_constructor'                : self.Module.ConstructorList,
            'module_destructor'                 : self.Module.DestructorList,
            'module_shadow'                     : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
            'module_pci_vendor_id'              : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
            'module_pci_device_id'              : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
            'module_pci_class_code'             : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
            'module_pci_revision'               : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
            'module_build_number'               : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
            'module_spec'                       : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
            'module_uefi_hii_resource_section'  : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
            'module_uni_file'                   : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
            'module_arch'                       : self.Arch,
            'package_item'                      : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
            'binary_item'                       : [],
            'patchablepcd_item'                 : [],
            'pcd_item'                          : [],
            'protocol_item'                     : [],
            'ppi_item'                          : [],
            'guid_item'                         : [],
            'flags_item'                        : [],
            'libraryclasses_item'               : []
            }

        if 'MODULE_UNI_FILE' in MDefs:
            UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
            if os.path.isfile(UNIFile):
                shutil.copy2(UNIFile, self.OutputDir)

        if self.AutoGenVersion > int(gInfSpecVersion, 0):
            AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
        else:
            AsBuiltInfDict['module_inf_version'] = gInfSpecVersion

        if DriverType:
            AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)

        if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
        if 'PI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])

        # [Binaries] entries: one line per coda target, tagged by extension.
        OutputDir = self.OutputDir.replace('\\', '/').strip('/')
        DebugDir = self.DebugDir.replace('\\', '/').strip('/')
        for Item in self.CodaTargetList:
            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
            if os.path.isabs(File):
                File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
            if Item.Target.Ext.lower() == '.aml':
                AsBuiltInfDict['binary_item'].append('ASL|' + File)
            elif Item.Target.Ext.lower() == '.acpi':
                AsBuiltInfDict['binary_item'].append('ACPI|' + File)
            elif Item.Target.Ext.lower() == '.efi':
                AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
            else:
                AsBuiltInfDict['binary_item'].append('BIN|' + File)
        if not self.DepexGenerated:
            DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
            if os.path.exists(DepexFile):
                self.DepexGenerated = True
        if self.DepexGenerated:
            if self.ModuleType in [SUP_MODULE_PEIM]:
                AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
                AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
                AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')

        Bin = self._GenOffsetBin()
        if Bin:
            AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)

        for Root, Dirs, Files in os.walk(OutputDir):
            for File in Files:
                if File.lower().endswith('.pdb'):
                    AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
        # Use the module's header comments from the @BinaryHeader tag onward
        # (mutated in place: the tag is rewritten to @file).
        HeaderComments = self.Module.HeaderComments
        StartPos = 0
        for Index in range(len(HeaderComments)):
            if HeaderComments[Index].find('@BinaryHeader') != -1:
                HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
                StartPos = Index
                break
        AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
        AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)

        # Protocol/PPI/GUID sections, each entry prefixed by its usage comments.
        GenList = [
            (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
            (self.PpiList, self._PpiComments, 'ppi_item'),
            (GuidList, self._GuidComments, 'guid_item')
        ]
        for Item in GenList:
            for CName in Item[0]:
                Comments = '\n  '.join(Item[1][CName]) if CName in Item[1] else ''
                Entry = Comments + '\n  ' + CName if Comments else CName
                AsBuiltInfDict[Item[2]].append(Entry)
        # Patchable PCDs: resolve each default value and its patch offset
        # from the .map/.efi pair.
        PatchList = parsePcdInfoFromMapFile(
                            os.path.join(self.OutputDir, self.Name + '.map'),
                            os.path.join(self.OutputDir, self.Name + '.efi')
                        )
        if PatchList:
            for Pcd in PatchablePcds:
                TokenCName = Pcd.TokenCName
                for PcdItem in GlobalData.MixedPcd:
                    if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                        TokenCName = PcdItem[0]
                        break
                for PatchPcd in PatchList:
                    if TokenCName == PatchPcd[0]:
                        break
                else:
                    # PCD has no entry in the map file: skip it.
                    continue
                PcdValue = ''
                if Pcd.DatumType == 'BOOLEAN':
                    BoolValue = Pcd.DefaultValue.upper()
                    if BoolValue == 'TRUE':
                        Pcd.DefaultValue = '1'
                    elif BoolValue == 'FALSE':
                        Pcd.DefaultValue = '0'

                if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
                    HexFormat = '0x%02x'
                    if Pcd.DatumType == TAB_UINT16:
                        HexFormat = '0x%04x'
                    elif Pcd.DatumType == TAB_UINT32:
                        HexFormat = '0x%08x'
                    elif Pcd.DatumType == TAB_UINT64:
                        HexFormat = '0x%016x'
                    PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
                else:
                    # VOID* PCD: render the default value as a padded byte array.
                    if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
                        EdkLogger.error("build", AUTOGEN_ERROR,
                                        "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
                                        )
                    ArraySize = int(Pcd.MaxDatumSize, 0)
                    PcdValue = Pcd.DefaultValue
                    if PcdValue[0] != '{':
                        Unicode = False
                        if PcdValue[0] == 'L':
                            Unicode = True
                        PcdValue = PcdValue.lstrip('L')
                        PcdValue = eval(PcdValue)
                        NewValue = '{'
                        for Index in range(0, len(PcdValue)):
                            if Unicode:
                                CharVal = ord(PcdValue[Index])
                                NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
                                        + '0x%02x' % (CharVal >> 8) + ', '
                            else:
                                NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
                        Padding = '0x00, '
                        if Unicode:
                            Padding = Padding * 2
                            ArraySize = ArraySize // 2
                        if ArraySize < (len(PcdValue) + 1):
                            if Pcd.MaxSizeUserSet:
                                EdkLogger.error("build", AUTOGEN_ERROR,
                                            "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                            )
                            else:
                                ArraySize = len(PcdValue) + 1
                        if ArraySize > len(PcdValue) + 1:
                            NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
                        PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
                    elif len(PcdValue.split(',')) <= ArraySize:
                        PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
                        PcdValue += '}'
                    else:
                        if Pcd.MaxSizeUserSet:
                            EdkLogger.error("build", AUTOGEN_ERROR,
                                        "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                        )
                        else:
                            ArraySize = len(PcdValue) + 1
                PcdItem = '%s.%s|%s|0x%X' % \
                    (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
                PcdComments = ''
                if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                    PcdComments = '\n  '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
                if PcdComments:
                    PcdItem = PcdComments + '\n  ' + PcdItem
                AsBuiltInfDict['patchablepcd_item'].append(PcdItem)

        # DynamicEx and VFR-referenced PCDs, with HII variable info folded
        # into the usage comment when present.
        for Pcd in Pcds + VfrPcds:
            PcdCommentList = []
            HiiInfo = ''
            TokenCName = Pcd.TokenCName
            for PcdItem in GlobalData.MixedPcd:
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                    TokenCName = PcdItem[0]
                    break
            if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
                for SkuName in Pcd.SkuInfoList:
                    SkuInfo = Pcd.SkuInfoList[SkuName]
                    HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
                    break
            if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
            if HiiInfo:
                UsageIndex = -1
                UsageStr = ''
                for Index, Comment in enumerate(PcdCommentList):
                    for Usage in UsageList:
                        if Comment.find(Usage) != -1:
                            UsageStr = Usage
                            UsageIndex = Index
                            break
                if UsageIndex != -1:
                    PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
                else:
                    PcdCommentList.append('## UNDEFINED ' + HiiInfo)
            PcdComments = '\n  '.join(PcdCommentList)
            PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
            if PcdComments:
                PcdEntry = PcdComments + '\n  ' + PcdEntry
            AsBuiltInfDict['pcd_item'].append(PcdEntry)
        for Item in self.BuildOption:
            if 'FLAGS' in self.BuildOption[Item]:
                AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))

        # Generated LibraryClasses section in comments.
        for Library in self.LibraryAutoGenList:
            AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))

        # Generated UserExtensions TianoCore section.
        # All tianocore user extensions are copied.
        UserExtStr = ''
        for TianoCore in self._GetTianoCoreUserExtensionList():
            UserExtStr += '\n'.join(TianoCore)
            ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
            if os.path.isfile(ExtensionFile):
                shutil.copy2(ExtensionFile, self.OutputDir)
        AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr

        # Generated depex expression section in comments.
        DepexExpression = self._GetDepexExpresionString()
        AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''

        AsBuiltInf = TemplateString()
        AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))

        SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)

        self.IsAsBuiltInfCreated = True
1622
1623 def CacheCopyFile(self, OriginDir, CopyDir, File):
1624 sub_dir = os.path.relpath(File, CopyDir)
1625 destination_file = os.path.join(OriginDir, sub_dir)
1626 destination_dir = os.path.dirname(destination_file)
1627 CreateDirectory(destination_dir)
1628 try:
1629 CopyFileOnChange(File, destination_dir)
1630 except:
1631 EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))
1632 return
1633
    ## Copy this module's build artifacts into the binary cache
    #
    #  Requires both the pre-makefile hash and the make hash to be present
    #  in GlobalData.gCacheIR; the make hash names the cache sub-directory.
    #  Artifacts ending in .ffs/.offset/.raw/.raw.txt go to the FFS cache
    #  dir, everything else to the module cache dir.
    #
    #  @retval False  when either hash cannot be generated
    #
    def CopyModuleToCache(self):
        self.GenPreMakefileHash(GlobalData.gCacheIR)
        if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
            return False

        self.GenMakeHash(GlobalData.gCacheIR)
        if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashChain or \
           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
            return False

        # Cache layout: <dest>/<platform>/<target_toolchain>/<arch>/<src>/<module>/<MakeHash>
        MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)
        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName, MakeHashStr)
        FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name, MakeHashStr)

        CreateDirectory (FileDir)
        self.SaveHashChainFileToCache(GlobalData.gCacheIR)
        ModuleFile = path.join(self.OutputDir, self.Name + '.inf')
        if os.path.exists(ModuleFile):
            CopyFileOnChange(ModuleFile, FileDir)
        # Fall back to the database's binary list when OutputFile is empty.
        if not self.OutputFile:
            Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
            self.OutputFile = Ma.Binaries
        for File in self.OutputFile:
            File = str(File)
            if not os.path.isabs(File):
                # Resolve a relative name against the output dir first,
                # then fall back to the FFS output dir.
                NewFile = os.path.join(self.OutputDir, File)
                if not os.path.exists(NewFile):
                    NewFile = os.path.join(self.FfsOutputDir, File)
                File = NewFile
            if os.path.exists(File):
                if File.lower().endswith('.ffs') or File.lower().endswith('.offset') or File.lower().endswith('.raw') \
                    or File.lower().endswith('.raw.txt'):
                    self.CacheCopyFile(FfsDir, self.FfsOutputDir, File)
                else:
                    self.CacheCopyFile(FileDir, self.OutputDir, File)
1673
    ## Persist the module's build-cache hash metadata
    #
    #  Writes three JSON files under the cache destination --
    #  <Name>.ModuleHashPair, <Name>.MakeHashChain and
    #  <Name>.ModuleFilesChain -- plus a CacheDebug copy of the makefile and
    #  AutoGen files.
    #
    #  @param gDict   Cache IR dictionary keyed by (INF path, arch)
    #  @retval True   on success
    #  @retval False  when cache dest is unset, a hash is missing, or any
    #                 JSON file cannot be written
    #
    def SaveHashChainFileToCache(self, gDict):
        if not GlobalData.gBinCacheDest:
            return False

        self.GenPreMakefileHash(gDict)
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
            return False

        self.GenMakeHash(gDict)
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
            return False

        # save the hash chain list as cache file
        MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)
        CacheDestDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
        CacheHashDestDir = path.join(CacheDestDir, MakeHashStr)
        ModuleHashPair = path.join(CacheDestDir, self.Name + ".ModuleHashPair")
        MakeHashChain = path.join(CacheHashDestDir, self.Name + ".MakeHashChain")
        ModuleFilesChain = path.join(CacheHashDestDir, self.Name + ".ModuleFilesChain")

        # save the HashChainDict as json file
        CreateDirectory (CacheDestDir)
        CreateDirectory (CacheHashDestDir)
        try:
            # ModuleHashPair accumulates across builds; merge with any
            # existing list and de-duplicate before writing back.
            ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
            if os.path.exists(ModuleHashPair):
                f = open(ModuleHashPair, 'r')
                ModuleHashPairList = json.load(f)
                f.close()
            PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
            MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
            ModuleHashPairList.append((PreMakeHash, MakeHash))
            ModuleHashPairList = list(set(map(tuple, ModuleHashPairList)))
            with open(ModuleHashPair, 'w') as f:
                json.dump(ModuleHashPairList, f, indent=2)
        except:
            EdkLogger.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair)
            return False

        try:
            with open(MakeHashChain, 'w') as f:
                json.dump(gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain, f, indent=2)
        except:
            EdkLogger.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain)
            return False

        try:
            with open(ModuleFilesChain, 'w') as f:
                json.dump(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain, f, indent=2)
        except:
            EdkLogger.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain)
            return False

        # save the autogenfile and makefile for debug usage
        CacheDebugDir = path.join(CacheHashDestDir, "CacheDebug")
        CreateDirectory (CacheDebugDir)
        CopyFileOnChange(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath, CacheDebugDir)
        if gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:
            for File in gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:
                CopyFileOnChange(str(File), CacheDebugDir)

        return True
1741
1742 ## Create makefile for the module and its dependent libraries
1743 #
1744 # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of
1745 # dependent libraries will be created
1746 #
1747 @cached_class_function
1748 def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):
1749 gDict = GlobalData.gCacheIR
1750 if (self.MetaFile.Path, self.Arch) in gDict and \
1751 gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
1752 return
1753
1754 # nest this function inside it's only caller.
1755 def CreateTimeStamp():
1756 FileSet = {self.MetaFile.Path}
1757
1758 for SourceFile in self.Module.Sources:
1759 FileSet.add (SourceFile.Path)
1760
1761 for Lib in self.DependentLibraryList:
1762 FileSet.add (Lib.MetaFile.Path)
1763
1764 for f in self.AutoGenDepSet:
1765 FileSet.add (f.Path)
1766
1767 if os.path.exists (self.TimeStampPath):
1768 os.remove (self.TimeStampPath)
1769 with open(self.TimeStampPath, 'w+') as fd:
1770 for f in FileSet:
1771 fd.write(f)
1772 fd.write("\n")
1773
1774 # Ignore generating makefile when it is a binary module
1775 if self.IsBinaryModule:
1776 return
1777
1778 self.GenFfsList = GenFfsList
1779
1780 if not self.IsLibrary and CreateLibraryMakeFile:
1781 for LibraryAutoGen in self.LibraryAutoGenList:
1782 LibraryAutoGen.CreateMakeFile()
1783
1784 # CanSkip uses timestamps to determine build skipping
1785 if self.CanSkip():
1786 return
1787
1788 if len(self.CustomMakefile) == 0:
1789 Makefile = GenMake.ModuleMakefile(self)
1790 else:
1791 Makefile = GenMake.CustomMakefile(self)
1792 if Makefile.Generate():
1793 EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
1794 (self.Name, self.Arch))
1795 else:
1796 EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
1797 (self.Name, self.Arch))
1798
1799 CreateTimeStamp()
1800
1801 MakefileType = Makefile._FileType
1802 MakefileName = Makefile._FILE_NAME_[MakefileType]
1803 MakefilePath = os.path.join(self.MakeFileDir, MakefileName)
1804
1805 MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
1806 MewIR.MakefilePath = MakefilePath
1807 MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet
1808 MewIR.CreateMakeFileDone = True
1809 with GlobalData.file_lock:
1810 try:
1811 IR = gDict[(self.MetaFile.Path, self.Arch)]
1812 IR.MakefilePath = MakefilePath
1813 IR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet
1814 IR.CreateMakeFileDone = True
1815 gDict[(self.MetaFile.Path, self.Arch)] = IR
1816 except:
1817 gDict[(self.MetaFile.Path, self.Arch)] = MewIR
1818
1819 def CopyBinaryFiles(self):
1820 for File in self.Module.Binaries:
1821 SrcPath = File.Path
1822 DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))
1823 CopyLongFilePath(SrcPath, DstPath)
    ## Create autogen code for the module and its dependent libraries
    #
    #   @param      CreateLibraryCodeFile   Flag indicating if or not the code of
    #                                       dependent libraries will be created
    #   @retval     AutoGenList             List of generated file names, or None when
    #                                       generation was skipped
    #
    def CreateCodeFile(self, CreateLibraryCodeFile=True):
        gDict = GlobalData.gCacheIR
        # Skip when the cache IR already marks autogen code done for this (module, arch)
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:
            return

        # Skip when this object already generated its code in this process
        if self.IsCodeFileCreated:
            return

        # Need to generate PcdDatabase even PcdDriver is binarymodule
        if self.IsBinaryModule and self.PcdIsDriver != '':
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
            return
        # Binary modules have no source to generate; libraries copy binaries instead
        if self.IsBinaryModule:
            if self.IsLibrary:
                self.CopyBinaryFiles()
            return

        # Generate dependent library code first (only from the top-level module)
        if not self.IsLibrary and CreateLibraryCodeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateCodeFile()

        # CanSkip uses timestamps to determine build skipping
        if self.CanSkip():
            return

        AutoGenList = []
        IgoredAutoGenList = []

        # Generate each AutoGen file; track which were written vs. unchanged/skipped
        for File in self.AutoGenFileList:
            if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
                AutoGenList.append(str(File))
            else:
                IgoredAutoGenList.append(str(File))


        for ModuleType in self.DepexList:
            # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
            if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
                continue

            Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
            DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}

            # A non-empty postfix notation means a real depex section was produced
            if len(Dpx.PostfixNotation) != 0:
                self.DepexGenerated = True

            if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
                AutoGenList.append(str(DpxFile))
            else:
                IgoredAutoGenList.append(str(DpxFile))

        # Log a summary of generated vs. skipped files
        if IgoredAutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
                            (" ".join(AutoGenList), self.Name, self.Arch))
        elif AutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
                            (" ".join(IgoredAutoGenList), self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
                            (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))

        # Publish the "code file done" state into the shared cache IR under the lock
        self.IsCodeFileCreated = True
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.CreateCodeFileDone = True
        with GlobalData.file_lock:
            try:
                # Update the existing IR entry when present ...
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.CreateCodeFileDone = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                # ... otherwise store the freshly-built entry
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR

        return AutoGenList
1903
1904 ## Summarize the ModuleAutoGen objects of all libraries used by this module
1905 @cached_property
1906 def LibraryAutoGenList(self):
1907 RetVal = []
1908 for Library in self.DependentLibraryList:
1909 La = ModuleAutoGen(
1910 self.Workspace,
1911 Library.MetaFile,
1912 self.BuildTarget,
1913 self.ToolChain,
1914 self.Arch,
1915 self.PlatformInfo.MetaFile,
1916 self.DataPipe
1917 )
1918 La.IsLibrary = True
1919 if La not in RetVal:
1920 RetVal.append(La)
1921 for Lib in La.CodaTargetList:
1922 self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
1923 return RetVal
1924
1925 def GenModuleHash(self):
1926 # Initialize a dictionary for each arch type
1927 if self.Arch not in GlobalData.gModuleHash:
1928 GlobalData.gModuleHash[self.Arch] = {}
1929
1930 # Early exit if module or library has been hashed and is in memory
1931 if self.Name in GlobalData.gModuleHash[self.Arch]:
1932 return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
1933
1934 # Initialze hash object
1935 m = hashlib.md5()
1936
1937 # Add Platform level hash
1938 m.update(GlobalData.gPlatformHash.encode('utf-8'))
1939
1940 # Add Package level hash
1941 if self.DependentPackageList:
1942 for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
1943 if Pkg.PackageName in GlobalData.gPackageHash:
1944 m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))
1945
1946 # Add Library hash
1947 if self.LibraryAutoGenList:
1948 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
1949 if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
1950 Lib.GenModuleHash()
1951 m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
1952
1953 # Add Module self
1954 f = open(str(self.MetaFile), 'rb')
1955 Content = f.read()
1956 f.close()
1957 m.update(Content)
1958
1959 # Add Module's source files
1960 if self.SourceFileList:
1961 for File in sorted(self.SourceFileList, key=lambda x: str(x)):
1962 f = open(str(File), 'rb')
1963 Content = f.read()
1964 f.close()
1965 m.update(Content)
1966
1967 GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()
1968
1969 return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
1970
1971 def GenModuleFilesHash(self, gDict):
1972 # Early exit if module or library has been hashed and is in memory
1973 if (self.MetaFile.Path, self.Arch) in gDict:
1974 if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:
1975 return gDict[(self.MetaFile.Path, self.Arch)]
1976
1977 DependencyFileSet = set()
1978 # Add Module Meta file
1979 DependencyFileSet.add(self.MetaFile)
1980
1981 # Add Module's source files
1982 if self.SourceFileList:
1983 for File in set(self.SourceFileList):
1984 DependencyFileSet.add(File)
1985
1986 # Add modules's include header files
1987 # Search dependency file list for each source file
1988 SourceFileList = []
1989 OutPutFileList = []
1990 for Target in self.IntroTargetList:
1991 SourceFileList.extend(Target.Inputs)
1992 OutPutFileList.extend(Target.Outputs)
1993 if OutPutFileList:
1994 for Item in OutPutFileList:
1995 if Item in SourceFileList:
1996 SourceFileList.remove(Item)
1997 SearchList = []
1998 for file_path in self.IncludePathList + self.BuildOptionIncPathList:
1999 # skip the folders in platform BuildDir which are not been generated yet
2000 if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):
2001 continue
2002 SearchList.append(file_path)
2003 FileDependencyDict = {}
2004 ForceIncludedFile = []
2005 for F in SourceFileList:
2006 # skip the files which are not been generated yet, because
2007 # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c
2008 if not os.path.exists(F.Path):
2009 continue
2010 FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)
2011
2012 if FileDependencyDict:
2013 for Dependency in FileDependencyDict.values():
2014 DependencyFileSet.update(set(Dependency))
2015
2016 # Caculate all above dependency files hash
2017 # Initialze hash object
2018 FileList = []
2019 m = hashlib.md5()
2020 for File in sorted(DependencyFileSet, key=lambda x: str(x)):
2021 if not os.path.exists(str(File)):
2022 EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
2023 continue
2024 f = open(str(File), 'rb')
2025 Content = f.read()
2026 f.close()
2027 m.update(Content)
2028 FileList.append((str(File), hashlib.md5(Content).hexdigest()))
2029
2030
2031 MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
2032 MewIR.ModuleFilesHashDigest = m.digest()
2033 MewIR.ModuleFilesHashHexDigest = m.hexdigest()
2034 MewIR.ModuleFilesChain = FileList
2035 with GlobalData.file_lock:
2036 try:
2037 IR = gDict[(self.MetaFile.Path, self.Arch)]
2038 IR.ModuleFilesHashDigest = m.digest()
2039 IR.ModuleFilesHashHexDigest = m.hexdigest()
2040 IR.ModuleFilesChain = FileList
2041 gDict[(self.MetaFile.Path, self.Arch)] = IR
2042 except:
2043 gDict[(self.MetaFile.Path, self.Arch)] = MewIR
2044
2045 return gDict[(self.MetaFile.Path, self.Arch)]
2046
    ## Compute the hash used for the pre-makefile cache checkpoint.
    #
    # Folds together the platform hash, dependent package hashes, dependent
    # libraries' file hashes and this module's own files hash, then stores
    # the hex digest in the shared cache IR.
    #
    #   @param      gDict   Shared build-cache IR dictionary keyed by (INF path, arch)
    #   @retval     ModuleBuildCacheIR  Updated cache entry, or None for binary
    #                                   modules or when hashing fails
    #
    def GenPreMakefileHash(self, gDict):
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            return gDict[(self.MetaFile.Path, self.Arch)]

        # skip binary module
        if self.IsBinaryModule:
            return

        # Make sure this module's own files hash is available first
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            self.GenModuleFilesHash(gDict)

        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return

        # Initialize hash object
        m = hashlib.md5()

        # Add Platform level hash (stored in gDict under the plain string key)
        if ('PlatformHash') in gDict:
            m.update(gDict[('PlatformHash')].encode('utf-8'))
        else:
            EdkLogger.quiet("[cache warning]: PlatformHash is missing")

        # Add Package level hash (sorted by name for a deterministic digest)
        if self.DependentPackageList:
            for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
                if (Pkg.PackageName, 'PackageHash') in gDict:
                    m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))
                else:
                    EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg.PackageName, self.MetaFile.Name, self.Arch))

        # Add Library hash; recurse first if the library is not hashed yet
        if self.LibraryAutoGenList:
            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
                   not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:
                    Lib.GenPreMakefileHash(gDict)
                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)

        # Add Module self
        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)

        # Publish the digest into the shared IR under the lock
        with GlobalData.file_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.PreMakefileHashHexDigest = m.hexdigest()
            gDict[(self.MetaFile.Path, self.Arch)] = IR

        return gDict[(self.MetaFile.Path, self.Arch)]
2100
2101 def GenMakeHeaderFilesHash(self, gDict):
2102 # Early exit if module or library has been hashed and is in memory
2103 if (self.MetaFile.Path, self.Arch) in gDict and \
2104 gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
2105 return gDict[(self.MetaFile.Path, self.Arch)]
2106
2107 # skip binary module
2108 if self.IsBinaryModule:
2109 return
2110
2111 if not (self.MetaFile.Path, self.Arch) in gDict or \
2112 not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:
2113 if self.IsLibrary:
2114 if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.libConstPcd:
2115 self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]
2116 if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.Refes:
2117 self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]
2118 self.CreateCodeFile()
2119 if not (self.MetaFile.Path, self.Arch) in gDict or \
2120 not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
2121 self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.File, self.Arch),[]))
2122
2123 if not (self.MetaFile.Path, self.Arch) in gDict or \
2124 not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \
2125 not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
2126 EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2127 return
2128
2129 DependencyFileSet = set()
2130 # Add Makefile
2131 if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:
2132 DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)
2133 else:
2134 EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2135
2136 # Add header files
2137 if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:
2138 for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:
2139 DependencyFileSet.add(File)
2140 else:
2141 EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2142
2143 # Add AutoGen files
2144 if self.AutoGenFileList:
2145 for File in set(self.AutoGenFileList):
2146 DependencyFileSet.add(File)
2147
2148 # Caculate all above dependency files hash
2149 # Initialze hash object
2150 FileList = []
2151 m = hashlib.md5()
2152 for File in sorted(DependencyFileSet, key=lambda x: str(x)):
2153 if not os.path.exists(str(File)):
2154 EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
2155 continue
2156 f = open(str(File), 'rb')
2157 Content = f.read()
2158 f.close()
2159 m.update(Content)
2160 FileList.append((str(File), hashlib.md5(Content).hexdigest()))
2161
2162 with GlobalData.file_lock:
2163 IR = gDict[(self.MetaFile.Path, self.Arch)]
2164 IR.AutoGenFileList = self.AutoGenFileList.keys()
2165 IR.MakeHeaderFilesHashChain = FileList
2166 IR.MakeHeaderFilesHashDigest = m.digest()
2167 gDict[(self.MetaFile.Path, self.Arch)] = IR
2168
2169 return gDict[(self.MetaFile.Path, self.Arch)]
2170
    ## Compute the hash used for the make cache checkpoint.
    #
    # Aggregates the makefile/header-files hash, dependent libraries' make
    # hashes and this module's own files hash into one digest, and builds
    # MakeHashChain: the ordered (file, hash) list used to pinpoint misses.
    #
    #   @param      gDict   Shared build-cache IR dictionary keyed by (INF path, arch)
    #   @retval     ModuleBuildCacheIR  Updated cache entry, or None for binary
    #                                   modules or when prerequisites are missing
    #
    def GenMakeHash(self, gDict):
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict and \
           gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
            return gDict[(self.MetaFile.Path, self.Arch)]

        # skip binary module
        if self.IsBinaryModule:
            return

        # Generate prerequisite hashes if they are not in the IR yet
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            self.GenModuleFilesHash(gDict)
        if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
            self.GenMakeHeaderFilesHash(gDict)

        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:
            EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return

        # Initialize hash object
        m = hashlib.md5()
        MakeHashChain = []

        # Add hash of makefile and dependency header files; extend the chain
        # with only the entries not already present, sorted for determinism
        m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)
        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))
        New.sort(key=lambda x: str(x))
        MakeHashChain += New

        # Add Library hash (sorted by name for a deterministic digest)
        if self.LibraryAutoGenList:
            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
                   not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:
                    Lib.GenMakeHash(gDict)
                if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:
                    print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)
                    continue
                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)
                New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))
                New.sort(key=lambda x: str(x))
                MakeHashChain += New

        # Add Module self
        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)
        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))
        New.sort(key=lambda x: str(x))
        MakeHashChain += New

        # Publish digest and chain into the shared IR under the lock
        with GlobalData.file_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.MakeHashDigest = m.digest()
            IR.MakeHashHexDigest = m.hexdigest()
            IR.MakeHashChain = MakeHashChain
            gDict[(self.MetaFile.Path, self.Arch)] = IR

        return gDict[(self.MetaFile.Path, self.Arch)]
2233
2234 ## Decide whether we can skip the left autogen and make process
2235 def CanSkipbyPreMakefileCache(self, gDict):
2236 if not GlobalData.gBinCacheSource:
2237 return False
2238
2239 # If Module is binary, do not skip by cache
2240 if self.IsBinaryModule:
2241 return False
2242
2243 # .inc is contains binary information so do not skip by hash as well
2244 for f_ext in self.SourceFileList:
2245 if '.inc' in str(f_ext):
2246 return False
2247
2248 # Get the module hash values from stored cache and currrent build
2249 # then check whether cache hit based on the hash values
2250 # if cache hit, restore all the files from cache
2251 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2252 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
2253
2254 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2255 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
2256 if not os.path.exists(ModuleHashPair):
2257 EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
2258 return False
2259
2260 try:
2261 f = open(ModuleHashPair, 'r')
2262 ModuleHashPairList = json.load(f)
2263 f.close()
2264 except:
2265 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
2266 return False
2267
2268 self.GenPreMakefileHash(gDict)
2269 if not (self.MetaFile.Path, self.Arch) in gDict or \
2270 not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
2271 EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2272 return False
2273
2274 MakeHashStr = None
2275 CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
2276 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2277 if PreMakefileHash == CurrentPreMakeHash:
2278 MakeHashStr = str(MakeHash)
2279
2280 if not MakeHashStr:
2281 return False
2282
2283 TargetHashDir = path.join(FileDir, MakeHashStr)
2284 TargetFfsHashDir = path.join(FfsDir, MakeHashStr)
2285
2286 if not os.path.exists(TargetHashDir):
2287 EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
2288 return False
2289
2290 for root, dir, files in os.walk(TargetHashDir):
2291 for f in files:
2292 File = path.join(root, f)
2293 self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
2294 if os.path.exists(TargetFfsHashDir):
2295 for root, dir, files in os.walk(TargetFfsHashDir):
2296 for f in files:
2297 File = path.join(root, f)
2298 self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)
2299
2300 if self.Name == "PcdPeim" or self.Name == "PcdDxe":
2301 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
2302
2303 with GlobalData.file_lock:
2304 IR = gDict[(self.MetaFile.Path, self.Arch)]
2305 IR.PreMakeCacheHit = True
2306 gDict[(self.MetaFile.Path, self.Arch)] = IR
2307 print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)
2308 #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))
2309 return True
2310
2311 ## Decide whether we can skip the make process
2312 def CanSkipbyMakeCache(self, gDict):
2313 if not GlobalData.gBinCacheSource:
2314 return False
2315
2316 # If Module is binary, do not skip by cache
2317 if self.IsBinaryModule:
2318 print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)
2319 return False
2320
2321 # .inc is contains binary information so do not skip by hash as well
2322 for f_ext in self.SourceFileList:
2323 if '.inc' in str(f_ext):
2324 with GlobalData.file_lock:
2325 IR = gDict[(self.MetaFile.Path, self.Arch)]
2326 IR.MakeCacheHit = False
2327 gDict[(self.MetaFile.Path, self.Arch)] = IR
2328 print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)
2329 return False
2330
2331 # Get the module hash values from stored cache and currrent build
2332 # then check whether cache hit based on the hash values
2333 # if cache hit, restore all the files from cache
2334 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2335 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
2336
2337 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2338 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
2339 if not os.path.exists(ModuleHashPair):
2340 EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
2341 return False
2342
2343 try:
2344 f = open(ModuleHashPair, 'r')
2345 ModuleHashPairList = json.load(f)
2346 f.close()
2347 except:
2348 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
2349 return False
2350
2351 self.GenMakeHash(gDict)
2352 if not (self.MetaFile.Path, self.Arch) in gDict or \
2353 not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
2354 EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2355 return False
2356
2357 MakeHashStr = None
2358 CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
2359 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2360 if MakeHash == CurrentMakeHash:
2361 MakeHashStr = str(MakeHash)
2362
2363 if not MakeHashStr:
2364 print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
2365 return False
2366
2367 TargetHashDir = path.join(FileDir, MakeHashStr)
2368 TargetFfsHashDir = path.join(FfsDir, MakeHashStr)
2369 if not os.path.exists(TargetHashDir):
2370 EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
2371 return False
2372
2373 for root, dir, files in os.walk(TargetHashDir):
2374 for f in files:
2375 File = path.join(root, f)
2376 self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
2377
2378 if os.path.exists(TargetFfsHashDir):
2379 for root, dir, files in os.walk(TargetFfsHashDir):
2380 for f in files:
2381 File = path.join(root, f)
2382 self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)
2383
2384 if self.Name == "PcdPeim" or self.Name == "PcdDxe":
2385 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
2386 with GlobalData.file_lock:
2387 IR = gDict[(self.MetaFile.Path, self.Arch)]
2388 IR.MakeCacheHit = True
2389 gDict[(self.MetaFile.Path, self.Arch)] = IR
2390 print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
2391 return True
2392
2393 ## Show the first file name which causes cache miss
2394 def PrintFirstMakeCacheMissFile(self, gDict):
2395 if not GlobalData.gBinCacheSource:
2396 return
2397
2398 # skip binary module
2399 if self.IsBinaryModule:
2400 return
2401
2402 if not (self.MetaFile.Path, self.Arch) in gDict:
2403 return
2404
2405 # Only print cache miss file for the MakeCache not hit module
2406 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
2407 return
2408
2409 if not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
2410 EdkLogger.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self.MetaFile.Path, self.Arch))
2411 return
2412
2413 # Find the cache dir name through the .ModuleHashPair file info
2414 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2415
2416 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2417 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
2418 if not os.path.exists(ModuleHashPair):
2419 EdkLogger.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2420 return
2421
2422 try:
2423 f = open(ModuleHashPair, 'r')
2424 ModuleHashPairList = json.load(f)
2425 f.close()
2426 except:
2427 EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2428 return
2429
2430 MakeHashSet = set()
2431 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2432 TargetHashDir = path.join(FileDir, str(MakeHash))
2433 if os.path.exists(TargetHashDir):
2434 MakeHashSet.add(MakeHash)
2435 if not MakeHashSet:
2436 EdkLogger.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2437 return
2438
2439 TargetHash = list(MakeHashSet)[0]
2440 TargetHashDir = path.join(FileDir, str(TargetHash))
2441 if len(MakeHashSet) > 1 :
2442 EdkLogger.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash, self.MetaFile.Path, self.Arch))
2443
2444 ListFile = path.join(TargetHashDir, self.Name + '.MakeHashChain')
2445 if os.path.exists(ListFile):
2446 try:
2447 f = open(ListFile, 'r')
2448 CachedList = json.load(f)
2449 f.close()
2450 except:
2451 EdkLogger.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile)
2452 return
2453 else:
2454 EdkLogger.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile)
2455 return
2456
2457 CurrentList = gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain
2458 for idx, (file, hash) in enumerate (CurrentList):
2459 (filecached, hashcached) = CachedList[idx]
2460 if file != filecached:
2461 EdkLogger.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self.MetaFile.Path, self.Arch, file, filecached))
2462 break
2463 if hash != hashcached:
2464 EdkLogger.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self.MetaFile.Path, self.Arch, file))
2465 break
2466
2467 return True
2468
2469 ## Decide whether we can skip the ModuleAutoGen process
2470 def CanSkipbyCache(self, gDict):
2471 # Hashing feature is off
2472 if not GlobalData.gBinCacheSource:
2473 return False
2474
2475 if self in GlobalData.gBuildHashSkipTracking:
2476 return GlobalData.gBuildHashSkipTracking[self]
2477
2478 # If library or Module is binary do not skip by hash
2479 if self.IsBinaryModule:
2480 GlobalData.gBuildHashSkipTracking[self] = False
2481 return False
2482
2483 # .inc is contains binary information so do not skip by hash as well
2484 for f_ext in self.SourceFileList:
2485 if '.inc' in str(f_ext):
2486 GlobalData.gBuildHashSkipTracking[self] = False
2487 return False
2488
2489 if not (self.MetaFile.Path, self.Arch) in gDict:
2490 return False
2491
2492 if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:
2493 GlobalData.gBuildHashSkipTracking[self] = True
2494 return True
2495
2496 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
2497 GlobalData.gBuildHashSkipTracking[self] = True
2498 return True
2499
2500 return False
2501
    ## Decide whether we can skip the ModuleAutoGen process
    #  If any source file is newer than the module then we cannot skip
    #
    #   @retval     True when the recorded timestamp is still current and
    #               AutoGen can be skipped for this module
    #
    def CanSkip(self):
        # Don't skip if cache feature enabled
        if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
            return False
        # Already decided as skippable earlier in this build
        if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
            return True
        # No timestamp file yet: must run AutoGen
        if not os.path.exists(self.TimeStampPath):
            return False
        #last creation time of the module
        # os.stat()[8] is the ST_MTIME slot (integer last-modification time)
        DstTimeStamp = os.stat(self.TimeStampPath)[8]

        SrcTimeStamp = self.Workspace._SrcTimeStamp
        if SrcTimeStamp > DstTimeStamp:
            return False

        # Re-check every file recorded in the timestamp file; any missing or
        # newer file forces a rebuild. TimeDict memoizes stat() per file.
        with open(self.TimeStampPath,'r') as f:
            for source in f:
                source = source.rstrip('\n')
                if not os.path.exists(source):
                    return False
                if source not in ModuleAutoGen.TimeDict :
                    ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
                if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
                    return False
        GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
        return True
2531
2532 @cached_property
2533 def TimeStampPath(self):
2534 return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')