]> git.proxmox.com Git - mirror_edk2.git/blob - BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
BaseTools: fix an incremental build issue.
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / ModuleAutoGen.py
1 ## @file
2 # Create makefile for MS nmake and GNU make
3 #
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
6 #
7 from __future__ import absolute_import
8 from AutoGen.AutoGen import AutoGen
9 from Common.LongFilePathSupport import CopyLongFilePath
10 from Common.BuildToolError import *
11 from Common.DataType import *
12 from Common.Misc import *
13 from Common.StringUtils import NormPath,GetSplitList
14 from collections import defaultdict
15 from Workspace.WorkspaceCommon import OrderedListDict
16 import os.path as path
17 import copy
18 import hashlib
19 from . import InfSectionParser
20 from . import GenC
21 from . import GenMake
22 from . import GenDepex
23 from io import BytesIO
24 from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile
25 from Workspace.MetaFileCommentParser import UsageList
26 from .GenPcdDb import CreatePcdDatabaseCode
27 from Common.caching import cached_class_function
28 from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo
29 from AutoGen.CacheIR import ModuleBuildCacheIR
30 import json
31 import tempfile
32
## Mapping Makefile type: MSFT toolchains drive nmake, GCC toolchains drive gmake
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
#
# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
# is the former use /I , the Latter used -I to specify include directories
#
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

## default file name for AutoGen
# The %(module_name)s placeholders are filled with the module BASE_NAME at use sites.
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
# INF_VERSION written into generated As-Built INF files
gInfSpecVersion = "0x00010017"
51
#
# Match name = variable
#
# NOTE: raw-string prefixes added; the previous plain strings relied on
# "\s"/"\w" being passed through, which raises SyntaxWarning/DeprecationWarning
# as invalid escape sequences on modern Python.
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
#
# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
#
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
61
#
# Template string to generic AsBuilt INF
#
# NOTE(review): ${name} placeholders and ${BEGIN}/${END} repetition markers are
# consumed by TemplateString substitution; the text between BEGIN/END repeats
# once per supplied binding.  The template body is runtime data - do not edit
# its content casually.
#
gAsBuiltInfHeaderString = TemplateString("""${header_comments}

# DO NOT EDIT
# FILE auto-generated

[Defines]
INF_VERSION = ${module_inf_version}
BASE_NAME = ${module_name}
FILE_GUID = ${module_guid}
MODULE_TYPE = ${module_module_type}${BEGIN}
VERSION_STRING = ${module_version_string}${END}${BEGIN}
PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
DESTRUCTOR = ${module_destructor}${END}${BEGIN}
SHADOW = ${module_shadow}${END}${BEGIN}
PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
SPEC = ${module_spec}${END}${BEGIN}
UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
MODULE_UNI_FILE = ${module_uni_file}${END}

[Packages.${module_arch}]${BEGIN}
${package_item}${END}

[Binaries.${module_arch}]${BEGIN}
${binary_item}${END}

[PatchPcd.${module_arch}]${BEGIN}
${patchablepcd_item}
${END}

[Protocols.${module_arch}]${BEGIN}
${protocol_item}
${END}

[Ppis.${module_arch}]${BEGIN}
${ppi_item}
${END}

[Guids.${module_arch}]${BEGIN}
${guid_item}
${END}

[PcdEx.${module_arch}]${BEGIN}
${pcd_item}
${END}

[LibraryClasses.${module_arch}]
## @LIB_INSTANCES${BEGIN}
# ${libraryclasses_item}${END}

${depexsection_item}

${userextension_tianocore_item}

${tail_comments}

[BuildOptions.${module_arch}]
## @AsBuilt${BEGIN}
## ${flags_item}${END}
""")
#
# extend lists contained in a dictionary with lists stored in another dictionary
# if CopyToDict is not derived from DefaultDict(list) then this may raise exception
#
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    """Append every list in CopyFromDict onto the matching list in CopyToDict."""
    for Key, Values in CopyFromDict.items():
        CopyToDict[Key].extend(Values)
140
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    """Join PathList into one path, ensure the directory exists, return the path."""
    FullPath = path.join(*PathList)
    CreateDirectory(FullPath)
    return FullPath
146
147 #
148 # Convert string to C format array
149 #
150 def _ConvertStringToByteArray(Value):
151 Value = Value.strip()
152 if not Value:
153 return None
154 if Value[0] == '{':
155 if not Value.endswith('}'):
156 return None
157 Value = Value.replace(' ', '').replace('{', '').replace('}', '')
158 ValFields = Value.split(',')
159 try:
160 for Index in range(len(ValFields)):
161 ValFields[Index] = str(int(ValFields[Index], 0))
162 except ValueError:
163 return None
164 Value = '{' + ','.join(ValFields) + '}'
165 return Value
166
167 Unicode = False
168 if Value.startswith('L"'):
169 if not Value.endswith('"'):
170 return None
171 Value = Value[1:]
172 Unicode = True
173 elif not Value.startswith('"') or not Value.endswith('"'):
174 return None
175
176 Value = eval(Value) # translate escape character
177 NewValue = '{'
178 for Index in range(0, len(Value)):
179 if Unicode:
180 NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
181 else:
182 NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
183 Value = NewValue + '0}'
184 return Value
185
186 ## ModuleAutoGen class
187 #
188 # This class encapsules the AutoGen behaviors for the build tools. In addition to
189 # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
190 # to the [depex] section in module's inf file.
191 #
192 class ModuleAutoGen(AutoGen):
193 # call super().__init__ then call the worker function with different parameter count
194 def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
195 if not hasattr(self, "_Init"):
196 self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
197 self._Init = True
198
    ## Cache the timestamps of metafiles of every module in a class attribute
    #
    TimeDict = {}

    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # check if this module is employed by active platform
        # args[0] is the platform meta-file, args[-1] the DataPipe (see _InitWorker).
        if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):
            EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
                              % (MetaFile, Arch))
            # Returning None means __init__ is never called for this module.
            return None
        return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
210
    ## Initialize ModuleAutoGen
    #
    # @param      Workspace           EdkIIWorkspaceBuild object
    # @param      ModuleFile          The path of module file
    # @param      Target              Build target (DEBUG, RELEASE)
    # @param      Toolchain           Name of tool chain
    # @param      Arch                The arch the module supports
    # @param      PlatformFile        Platform meta-file
    # @param      DataPipe            Cross-process data pipe carrying platform context
    #
    def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile,DataPipe):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)

        self.Workspace = Workspace
        self.WorkspaceDir = ""
        self.PlatformInfo = None
        self.DataPipe = DataPipe
        # Fills in WorkspaceDir and PlatformInfo from the DataPipe's "P_Info" entry.
        self.__init_platform_info__()
        self.MetaFile = ModuleFile
        self.SourceDir = self.MetaFile.SubDir
        # Keep the source dir workspace-relative.
        self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)

        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily

        # Flags tracking which generated artifacts have been produced.
        self.IsCodeFileCreated = False
        self.IsAsBuiltInfCreated = False
        self.DepexGenerated = False

        self.BuildDatabase = self.Workspace.BuildDatabase
        self.BuildRuleOrder = None
        self.BuildTime = 0

        # Usage comments harvested from the INF, keyed by GUID/protocol/PPI name.
        self._GuidComments = OrderedListDict()
        self._ProtocolComments = OrderedListDict()
        self._PpiComments = OrderedListDict()
        # Build-target bookkeeping; None until _ApplyBuildRule first runs.
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None

        self.AutoGenDepSet = set()
        self.ReferenceModules = []
        self.ConstPcd = {}
        self.Makefile = None
        self.FileDependCache = {}
260
    def __init_platform_info__(self):
        """Populate WorkspaceDir and PlatformInfo from the DataPipe "P_Info" entry."""
        pinfo = self.DataPipe.Get("P_Info")
        self.WorkspaceDir = pinfo.get("WorkspaceDir")
        self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)
    ## hash() operator of ModuleAutoGen
    #
    #  The module file path and arch string will be used to represent
    #  hash value of this object
    #
    #   @retval   int Hash value of the module file path and arch
    #
    # The decorator caches the computed hash on the instance after first use.
    @cached_class_function
    def __hash__(self):
        return hash((self.MetaFile, self.Arch))
275 def __repr__(self):
276 return "%s [%s]" % (self.MetaFile, self.Arch)
277
278 # Get FixedAtBuild Pcds of this Module
279 @cached_property
280 def FixedAtBuildPcds(self):
281 RetVal = []
282 for Pcd in self.ModulePcdList:
283 if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:
284 continue
285 if Pcd not in RetVal:
286 RetVal.append(Pcd)
287 return RetVal
288
289 @cached_property
290 def FixedVoidTypePcds(self):
291 RetVal = {}
292 for Pcd in self.FixedAtBuildPcds:
293 if Pcd.DatumType == TAB_VOID:
294 if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:
295 RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue
296 return RetVal
297
298 @property
299 def UniqueBaseName(self):
300 ModuleNames = self.DataPipe.Get("M_Name")
301 if not ModuleNames:
302 return self.Name
303 return ModuleNames.get((self.Name,self.MetaFile),self.Name)
304
    # Macros could be used in build_rule.txt (also Makefile)
    # Keys are macro names substituted into build rules and makefiles; several
    # are aliases of each other (e.g. TOOLCHAIN / TOOLCHAIN_TAG / TOOL_CHAIN_TAG).
    @cached_property
    def Macros(self):
        return OrderedDict((
            ("WORKSPACE" ,self.WorkspaceDir),
            ("MODULE_NAME" ,self.Name),
            ("MODULE_NAME_GUID" ,self.UniqueBaseName),
            ("MODULE_GUID" ,self.Guid),
            ("MODULE_VERSION" ,self.Version),
            ("MODULE_TYPE" ,self.ModuleType),
            ("MODULE_FILE" ,str(self.MetaFile)),
            ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),
            ("MODULE_RELATIVE_DIR" ,self.SourceDir),
            ("MODULE_DIR" ,self.SourceDir),
            ("BASE_NAME" ,self.Name),
            ("ARCH" ,self.Arch),
            ("TOOLCHAIN" ,self.ToolChain),
            ("TOOLCHAIN_TAG" ,self.ToolChain),
            ("TOOL_CHAIN_TAG" ,self.ToolChain),
            ("TARGET" ,self.BuildTarget),
            ("BUILD_DIR" ,self.PlatformInfo.BuildDir),
            ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
            ("MODULE_BUILD_DIR" ,self.BuildDir),
            ("OUTPUT_DIR" ,self.OutputDir),
            ("DEBUG_DIR" ,self.DebugDir),
            ("DEST_DIR_OUTPUT" ,self.OutputDir),
            ("DEST_DIR_DEBUG" ,self.DebugDir),
            ("PLATFORM_NAME" ,self.PlatformInfo.Name),
            ("PLATFORM_GUID" ,self.PlatformInfo.Guid),
            ("PLATFORM_VERSION" ,self.PlatformInfo.Version),
            ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),
            ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
            ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),
            ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)
            ))
341
    ## Return the module build data object
    # Looked up from the workspace build database keyed by (metafile, arch, target, toolchain).
    @cached_property
    def Module(self):
        return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]

    ## Return the module name (BASE_NAME from the INF [Defines] section)
    @cached_property
    def Name(self):
        return self.Module.BaseName

    ## Return the module DxsFile if exist
    @cached_property
    def DxsFile(self):
        return self.Module.DxsFile
356
357 ## Return the module meta-file GUID
358 @cached_property
359 def Guid(self):
360 #
361 # To build same module more than once, the module path with FILE_GUID overridden has
362 # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
363 # in DSC. The overridden GUID can be retrieved from file name
364 #
365 if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
366 #
367 # Length of GUID is 36
368 #
369 return os.path.basename(self.MetaFile.Path)[:36]
370 return self.Module.Guid
371
    # The following properties are simple cached pass-throughs to the module
    # build data object (self.Module).

    ## Return the module version
    @cached_property
    def Version(self):
        return self.Module.Version

    ## Return the module type
    @cached_property
    def ModuleType(self):
        return self.Module.ModuleType

    ## Return the component type (for Edk.x style of module)
    @cached_property
    def ComponentType(self):
        return self.Module.ComponentType

    ## Return the build type
    @cached_property
    def BuildType(self):
        return self.Module.BuildType

    ## Return the PCD_IS_DRIVER setting
    @cached_property
    def PcdIsDriver(self):
        return self.Module.PcdIsDriver

    ## Return the autogen version, i.e. module meta-file version
    @cached_property
    def AutoGenVersion(self):
        return self.Module.AutoGenVersion

    ## Check if the module is library or not
    @cached_property
    def IsLibrary(self):
        return bool(self.Module.LibraryClass)

    ## Check if the module is binary module or not
    @cached_property
    def IsBinaryModule(self):
        return self.Module.IsBinaryModule
411
    ## Return the directory to store intermediate files of the module
    # NOTE: _MakeDir creates the directory on first access as a side effect.
    @cached_property
    def BuildDir(self):
        return _MakeDir((
            self.PlatformInfo.BuildDir,
            self.Arch,
            self.SourceDir,
            self.MetaFile.BaseName
            ))

    ## Return the directory to store the intermediate object files of the module
    @cached_property
    def OutputDir(self):
        return _MakeDir((self.BuildDir, "OUTPUT"))

    ## Return the directory path to store ffs file
    # Empty string when no FDF file is being processed.
    @cached_property
    def FfsOutputDir(self):
        if GlobalData.gFdfParser:
            return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
        return ''

    ## Return the directory to store auto-gened source files of the module
    @cached_property
    def DebugDir(self):
        return _MakeDir((self.BuildDir, "DEBUG"))
438
439 ## Return the path of custom file
440 @cached_property
441 def CustomMakefile(self):
442 RetVal = {}
443 for Type in self.Module.CustomMakefile:
444 MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'
445 File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
446 RetVal[MakeType] = File
447 return RetVal
448
    ## Return the directory of the makefile
    #
    #   @retval     string  The directory string of module's makefile
    #
    # Alias for BuildDir: the makefile is generated in the module build directory.
    @cached_property
    def MakeFileDir(self):
        return self.BuildDir

    ## Return build command string
    #
    #   @retval     string  Build command string
    #
    # Delegates to the platform, which knows the toolchain's make invocation.
    @cached_property
    def BuildCommand(self):
        return self.PlatformInfo.BuildCommand
464
465 ## Get object list of all packages the module and its dependent libraries belong to
466 #
467 # @retval list The list of package object
468 #
469 @cached_property
470 def DerivedPackageList(self):
471 PackageList = []
472 for M in [self.Module] + self.DependentLibraryList:
473 for Package in M.Packages:
474 if Package in PackageList:
475 continue
476 PackageList.append(Package)
477 return PackageList
478
    ## Get the depex string
    #
    # @return : a string contain all depex expression.
    def _GetDepexExpresionString(self):
        """Assemble the [Depex] section text for the As-Built INF.

        Collects depex expressions from this module and its dependent
        libraries.  USER_DEFINED / HOST_APPLICATION modules keep their
        per-(arch, module-type) sections verbatim; other module types get
        their expressions AND-ed into a single commented section.
        """
        DepexStr = ''
        DepexList = []
        ## DPX_SOURCE IN Define section.
        if self.Module.DxsFile:
            # Depex comes from a DXS file instead; nothing to assemble here.
            return DepexStr
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            DepexExpressionList = InfObj.GetDepexExpresionList()
            for DepexExpression in DepexExpressionList:
                for key in DepexExpression:
                    Arch, ModuleType = key
                    # Strip comment lines from the raw section content.
                    DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
                    # the type of build module is USER_DEFINED.
                    # All different DEPEX section tags would be copied into the As Built INF file
                    # and there would be separate DEPEX section tags
                    if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
                        if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
                    else:
                        if Arch.upper() == TAB_ARCH_COMMON or \
                           (Arch.upper() == self.Arch.upper() and \
                           ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
                            DepexList.append({(Arch, ModuleType): DepexExpr})

        #the type of build module is USER_DEFINED.
        if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
            for Depex in DepexList:
                for key in Depex:
                    DepexStr += '[Depex.%s.%s]\n' % key
                    DepexStr += '\n'.join('# '+ val for val in Depex[key])
                    DepexStr += '\n\n'
            if not DepexStr:
                # No depex collected: emit an empty arch-specific section header.
                return '[Depex.%s]\n' % self.Arch
            return DepexStr

        #the type of build module not is USER_DEFINED.
        Count = 0
        for Depex in DepexList:
            Count += 1
            if DepexStr != '':
                DepexStr += ' AND '
            DepexStr += '('
            for D in Depex.values():
                DepexStr += ' '.join(val for val in D)
            # Drop a trailing END token; the combined expression gets one implicitly.
            Index = DepexStr.find('END')
            if Index > -1 and Index == len(DepexStr) - 3:
                DepexStr = DepexStr[:-3]
            DepexStr = DepexStr.strip()
            DepexStr += ')'
        if Count == 1:
            # A single expression needs no wrapping parentheses.
            DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
        if not DepexStr:
            return '[Depex.%s]\n' % self.Arch
        return '[Depex.%s]\n# ' % self.Arch + DepexStr
538
    ## Merge dependency expression
    #
    #   @retval     list    The token list of the dependency expression after parsed
    #
    @cached_property
    def DepexList(self):
        """Token list of the merged depex, as {ModuleType: [tokens]}.

        Empty dict when the module uses a DXS file, is a library, or ships a
        prebuilt .depex binary.  FixedAtBuild VOID* PCDs referenced in a
        [Depex] section are replaced by their 16-byte GUID values.
        """
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexList = []
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
        #
        # Gather PCD substitutions from this module and all its libraries first.
        FixedVoidTypePcds = {}
        for M in [self] + self.LibraryAutoGenList:
            FixedVoidTypePcds.update(M.FixedVoidTypePcds)
        for M in [self] + self.LibraryAutoGenList:
            Inherited = False
            for D in M.Module.Depex[self.Arch, self.ModuleType]:
                if DepexList != []:
                    DepexList.append('AND')
                DepexList.append('(')
                #replace D with value if D is FixedAtBuild PCD
                NewList = []
                for item in D:
                    if '.' not in item:
                        # Plain token (opcode or GUID C name); keep as-is.
                        NewList.append(item)
                    else:
                        # Dotted name: must be a FixedAtBuild VOID* PCD holding a GUID.
                        try:
                            Value = FixedVoidTypePcds[item]
                            if len(Value.split(',')) != 16:
                                EdkLogger.error("build", FORMAT_INVALID,
                                                "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
                            NewList.append(Value)
                        except:
                            EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))

                DepexList.extend(NewList)
                if DepexList[-1] == 'END':  # no need of a END at this time
                    DepexList.pop()
                DepexList.append(')')
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
            # BEFORE/AFTER expressions cannot be combined with anything further.
            if 'BEFORE' in DepexList or 'AFTER' in DepexList:
                break
        if len(DepexList) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexList}
588
    ## Merge dependency expression
    #
    #   @retval     list    The token list of the dependency expression after parsed
    #
    @cached_property
    def DepexExpressionDict(self):
        """Merged depex as one text expression, {ModuleType: "..."}.

        Empty dict for DXS-file modules, libraries, or prebuilt-depex modules.
        """
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexExpressionString = ''
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.DepexExpression[self.Arch, self.ModuleType]:
                if DepexExpressionString != '':
                    DepexExpressionString += ' AND '
                DepexExpressionString += '('
                DepexExpressionString += D
                # NOTE(review): rstrip('END') strips the CHARACTERS E/N/D from the
                # tail, not just a literal "END" token - presumably safe for the
                # token vocabulary used here, but verify before relying on it.
                DepexExpressionString = DepexExpressionString.rstrip('END').strip()
                DepexExpressionString += ')'
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
            # BEFORE/AFTER expressions terminate the merge.
            if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
                break
        if len(DepexExpressionString) > 0:
            EdkLogger.verbose('')

        return {self.ModuleType:DepexExpressionString}
620
    # Get the tiano core user extension, it is contain dependent library.
    # @retval: a list contain tiano core userextension.
    #
    def _GetTianoCoreUserExtensionList(self):
        """Collect [UserExtensions.TianoCore...] sections from this module and its libraries.

        Each returned entry is a list of lines: the bracketed section header
        followed by the section content and a trailing newline marker.
        """
        TianoCoreUserExtentionList = []
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
            for TianoCoreUserExtent in TianoCoreUserExtenList:
                for Section in TianoCoreUserExtent:
                    ItemList = Section.split(TAB_SPLIT)
                    # A 4-part section name carries an arch qualifier as its last field.
                    Arch = self.Arch
                    if len(ItemList) == 4:
                        Arch = ItemList[3]
                    if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
                        TianoCoreList = []
                        TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
                        TianoCoreList.extend(TianoCoreUserExtent[Section][:])
                        TianoCoreList.append('\n')
                        TianoCoreUserExtentionList.append(TianoCoreList)

        return TianoCoreUserExtentionList
644
    ## Return the list of specification version required for the module
    #
    #   @retval     list    The list of specification defined in module file
    #
    @cached_property
    def Specification(self):
        return self.Module.Specification

    ## Tool option for the module build
    #
    #   @param      PlatformInfo    The object of PlatformBuildInfo
    #   @retval     dict            The dict containing valid options
    #
    # Side effect: also sets self.BuildRuleOrder to a list of '.ext' strings
    # derived from the platform's BUILDRULEORDER option.
    @cached_property
    def BuildOption(self):
        RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
        if self.BuildRuleOrder:
            self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
        return RetVal
664
665 ## Get include path list from tool option for the module build
666 #
667 # @retval list The include path list
668 #
669 @cached_property
670 def BuildOptionIncPathList(self):
671 #
672 # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
673 # is the former use /I , the Latter used -I to specify include directories
674 #
675 if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):
676 BuildOptIncludeRegEx = gBuildOptIncludePatternMsft
677 elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
678 BuildOptIncludeRegEx = gBuildOptIncludePatternOther
679 else:
680 #
681 # New ToolChainFamily, don't known whether there is option to specify include directories
682 #
683 return []
684
685 RetVal = []
686 for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
687 try:
688 FlagOption = self.BuildOption[Tool]['FLAGS']
689 except KeyError:
690 FlagOption = ''
691
692 if self.ToolChainFamily != 'RVCT':
693 IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
694 else:
695 #
696 # RVCT may specify a list of directory seperated by commas
697 #
698 IncPathList = []
699 for Path in BuildOptIncludeRegEx.findall(FlagOption):
700 PathList = GetSplitList(Path, TAB_COMMA_SPLIT)
701 IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)
702
703 #
704 # EDK II modules must not reference header files outside of the packages they depend on or
705 # within the module's directory tree. Report error if violation.
706 #
707 if GlobalData.gDisableIncludePathCheck == False:
708 for Path in IncPathList:
709 if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
710 ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
711 EdkLogger.error("build",
712 PARAMETER_INVALID,
713 ExtraData=ErrMsg,
714 File=str(self.MetaFile))
715 RetVal += IncPathList
716 return RetVal
717
    ## Return a list of files which can be built from source
    #
    #  What kind of files can be built is determined by build rules in
    #  $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
    #
    # Side effects: prepends each source's directory onto IncludePathList and
    # applies build rules (via _ApplyBuildRule) to every accepted file.
    @cached_property
    def SourceFileList(self):
        RetVal = []
        # Accept files tagged for any/all toolchains or the active one.
        ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
        ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
        for F in self.Module.Sources:
            # match tool chain
            if F.TagName not in ToolChainTagSet:
                EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
                continue
            # match tool chain family or build rule family
            if F.ToolChainFamily not in ToolChainFamilySet:
                EdkLogger.debug(
                    EdkLogger.DEBUG_0,
                    "The file [%s] must be built by tools of [%s], " \
                    "but current toolchain family is [%s], buildrule family is [%s]" \
                        % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
                continue

            # add the file path into search path list for file including
            if F.Dir not in self.IncludePathList:
                self.IncludePathList.insert(0, F.Dir)
            RetVal.append(F)

        # Drop duplicate sources per BUILDRULEORDER before applying rules.
        self._MatchBuildRuleOrder(RetVal)

        for F in RetVal:
            self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return RetVal
753
    def _MatchBuildRuleOrder(self, FileList):
        """Keep only the highest-priority extension among same-named sources.

        When several source files share a base path and their extensions all
        appear in BUILDRULEORDER, only the first-ranked extension survives;
        the others are removed from FileList in place.
        """
        Order_Dict = {}
        # Property access for its side effect: populates self.BuildRuleOrder.
        self.BuildOption
        for SingleFile in FileList:
            if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
                # Group candidate files by their path without the extension.
                key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]
                if key in Order_Dict:
                    Order_Dict[key].append(SingleFile.Ext)
                else:
                    Order_Dict[key] = [SingleFile.Ext]

        RemoveList = []
        for F in Order_Dict:
            if len(Order_Dict[F]) > 1:
                # Lower index in BuildRuleOrder wins; everything after it goes.
                Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
                for Ext in Order_Dict[F][1:]:
                    RemoveList.append(F + Ext)

        for item in RemoveList:
            # NOTE(review): item is a plain path string while FileList holds
            # file objects - presumably their __eq__ accepts strings; confirm.
            FileList.remove(item)

        return FileList
776
    ## Return the list of unicode files (triggers build-rule application via FileTypes)
    @cached_property
    def UnicodeFileList(self):
        return self.FileTypes.get(TAB_UNICODE_FILE,[])

    ## Return the list of vfr files
    @cached_property
    def VfrFileList(self):
        return self.FileTypes.get(TAB_VFR_FILE, [])

    ## Return the list of Image Definition files
    @cached_property
    def IdfFileList(self):
        return self.FileTypes.get(TAB_IMAGE_FILE,[])
791
    ## Return a list of files which can be built from binary
    #
    #  "Build" binary files are just to copy them to build directory.
    #
    #   @retval     list            The list of files which can be built later
    #
    @cached_property
    def BinaryFileList(self):
        RetVal = []
        for F in self.Module.Binaries:
            # Keep binaries for any target ('*'/common) or the active build target.
            if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:
                continue
            RetVal.append(F)
            # Append BEFORE applying the rule: _ApplyBuildRule consults this
            # very list (passed as BinaryFileList) while processing F.
            self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
        return RetVal
807
    # Map of file type AND source-file extension -> instantiated build rule.
    # Lookup order: BuildRuleFamily first, then ToolChainFamily; within each,
    # BuildType first, then ModuleType when they differ.
    @cached_property
    def BuildRules(self):
        RetVal = {}
        BuildRuleDatabase = self.PlatformInfo.BuildRule
        for Type in BuildRuleDatabase.FileTypeList:
            #first try getting build rule by BuildRuleFamily
            RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
            if not RuleObject:
                # build type is always module type, but ...
                if self.ModuleType != self.BuildType:
                    RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
            #second try getting build rule by ToolChainFamily
            if not RuleObject:
                RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
                if not RuleObject:
                    # build type is always module type, but ...
                    if self.ModuleType != self.BuildType:
                        RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
            if not RuleObject:
                continue
            # Expand $(MACRO) references in the rule with this module's macros.
            RuleObject = RuleObject.Instantiate(self.Macros)
            RetVal[Type] = RuleObject
            for Ext in RuleObject.SourceFileExtList:
                RetVal[Ext] = RuleObject
        return RetVal
833
    def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
        """Chain build rules starting from File, recording build targets.

        Repeatedly applies the matching build rule to each produced output
        until no rule matches, a library's STATIC_LIBRARY stage is reached,
        or a rule type repeats (cycle guard).  Populates _BuildTargets,
        _IntroBuildTargetList, _FinalBuildTargetList and _FileTypes.
        """
        if self._BuildTargets is None:
            # Lazy initialization of the target bookkeeping structures.
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        if not BinaryFileList:
            BinaryFileList = self.BinaryFileList

        SubDirectory = os.path.join(self.OutputDir, File.SubDir)
        if not os.path.exists(SubDirectory):
            CreateDirectory(SubDirectory)
        LastTarget = None
        RuleChain = set()
        SourceList = [File]
        Index = 0
        #
        # Make sure to get build rule order value
        #
        # Property access for its side effect: populates self.BuildRuleOrder.
        self.BuildOption

        # SourceList grows as rules emit outputs; iterate it as a work queue.
        while Index < len(SourceList):
            Source = SourceList[Index]
            Index = Index + 1

            if Source != File:
                CreateDirectory(Source.Dir)

            if File.IsBinary and File == Source and File in BinaryFileList:
                # Skip all files that are not binary libraries
                if not self.IsLibrary:
                    continue
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            elif FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            else:
                # stop at no more rules
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            FileType = RuleObject.SourceFileType
            self._FileTypes[FileType].add(Source)

            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            Target = RuleObject.Apply(Source, self.BuildRuleOrder)
            if not Target:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break
            elif not Target.Outputs:
                # Only do build for target with outputs
                self._FinalBuildTargetList.add(Target)

            self._BuildTargets[FileType].add(Target)

            if not Source.IsBinary and Source == File:
                # First-stage target produced directly from the original source.
                self._IntroBuildTargetList.add(Target)

            # to avoid cyclic rule
            if FileType in RuleChain:
                break

            RuleChain.add(FileType)
            SourceList.extend(Target.Outputs)
            LastTarget = Target
            # Outputs have no predetermined type; let extension lookup decide.
            FileType = TAB_UNKNOWN_FILE
909
    # All build targets keyed by source file type, as built up by _ApplyBuildRule.
    @cached_property
    def Targets(self):
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        #TRICK: call SourceFileList property to apply build rule for source files
        self.SourceFileList

        #TRICK: call _GetBinaryFileList to apply build rule for binary files
        self.BinaryFileList

        return self._BuildTargets
925
926 @cached_property
927 def IntroTargetList(self):
928 self.Targets
929 return self._IntroBuildTargetList
930
@cached_property
def CodaTargetList(self):
    """Final (leaf) targets at the end of the build-rule chain."""
    _ = self.Targets  # evaluated only for its side effect: applies the build rules
    return self._FinalBuildTargetList
935
@cached_property
def FileTypes(self):
    """Mapping of file type -> set of files, filled while applying build rules."""
    _ = self.Targets  # evaluated only for its side effect: applies the build rules
    return self._FileTypes
940
941 ## Get the list of package object the module depends on
942 #
943 # @retval list The package object list
944 #
@cached_property
def DependentPackageList(self):
    """Package objects this module depends on, straight from the parsed INF."""
    return self.Module.Packages
948
949 ## Return the list of auto-generated code file
950 #
951 # @retval list The list of auto-generated file
952 #
@cached_property
def AutoGenFileList(self):
    """Map PathClass -> content for every auto-generated file of this module.

    Runs GenC.CreateCode once, then registers each produced file with
    _ApplyBuildRule so it takes part in the build.  Text outputs go under
    DebugDir; binary string/image blobs (marked IsBinary) go under OutputDir.
    """
    # UNI/IDF content is generated unless this is a UEFI_HII build type.
    AutoGenUniIdf = self.BuildType != 'UEFI_HII'
    UniStringBinBuffer = BytesIO()
    IdfGenBinBuffer = BytesIO()
    RetVal = {}
    AutoGenC = TemplateString()
    AutoGenH = TemplateString()
    StringH = TemplateString()
    StringIdf = TemplateString()
    # CreateCode fills the four template strings and both binary buffers in place.
    GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
    #
    # AutoGen.c is generated if there are library classes in inf, or there are object files
    #
    if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
                                or TAB_OBJECT_FILE in self.FileTypes):
        AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
        RetVal[AutoFile] = str(AutoGenC)
        self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
    if str(AutoGenH) != "":
        AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
        RetVal[AutoFile] = str(AutoGenH)
        self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
    if str(StringH) != "":
        AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
        RetVal[AutoFile] = str(StringH)
        self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
    # Packed unicode string data, if any, is emitted as a binary artifact.
    if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
        AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
        RetVal[AutoFile] = UniStringBinBuffer.getvalue()
        AutoFile.IsBinary = True
        self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
    if UniStringBinBuffer is not None:
        UniStringBinBuffer.close()
    if str(StringIdf) != "":
        AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
        RetVal[AutoFile] = str(StringIdf)
        self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
    # Packed image-definition data, if any, is likewise a binary artifact.
    if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
        AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
        RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
        AutoFile.IsBinary = True
        self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
    if IdfGenBinBuffer is not None:
        IdfGenBinBuffer.close()
    return RetVal
999
1000 ## Return the list of library modules explicitly or implicitly used by this module
@cached_property
def DependentLibraryList(self):
    """Library instances this module links against; empty for library modules."""
    # Library classes and PCDs are only merged for non-library modules.
    return [] if self.IsLibrary else self.PlatformInfo.ApplyLibraryInstance(self.Module)
1007
1008 ## Get the list of PCDs from current module
1009 #
1010 # @retval list The list of PCD
1011 #
@cached_property
def ModulePcdList(self):
    """PCDs declared by this module, with platform-level settings applied."""
    return self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
@cached_property
def _PcdComments(self):
    """Usage comments for every PCD of the module and, for drivers, its libraries."""
    Comments = OrderedListDict()
    ExtendCopyDictionaryLists(Comments, self.Module.PcdComments)
    if not self.IsLibrary:
        for Lib in self.DependentLibraryList:
            ExtendCopyDictionaryLists(Comments, Lib.PcdComments)
    return Comments
1026
1027 ## Get the list of PCDs from dependent libraries
1028 #
1029 # @retval list The list of PCD
1030 #
@cached_property
def LibraryPcdList(self):
    """PCDs contributed by dependent libraries (non-library modules only)."""
    if self.IsLibrary:
        return []
    Result = []
    SeenKeys = set()
    for Lib in self.DependentLibraryList:
        NewPcds = OrderedDict()
        for PcdKey in Lib.Pcds:
            # A PCD already declared by the module, or by an earlier
            # library, takes precedence — skip duplicates.
            if PcdKey in self.Module.Pcds or PcdKey in SeenKeys:
                continue
            SeenKeys.add(PcdKey)
            NewPcds[PcdKey] = copy.copy(Lib.Pcds[PcdKey])
        Result.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, NewPcds, Library=Lib))
    return Result
1048
1049 ## Get the GUID value mapping
1050 #
1051 # @retval dict The mapping between GUID cname and its value
1052 #
@cached_property
def GuidList(self):
    """Mapping of GUID cname -> value for this module plus its libraries.

    Side effect: merges GUID usage comments into self._GuidComments.
    """
    # Copy first: the previous code aliased self.Module.Guids, so the
    # update() calls below mutated the shared module object.  ProtocolList
    # and PpiList already copy; this makes the three properties consistent.
    RetVal = OrderedDict(self.Module.Guids)
    for Library in self.DependentLibraryList:
        RetVal.update(Library.Guids)
        ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)
    ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)
    return RetVal
1061
@cached_property
def GetGuidsUsedByPcd(self):
    """GUIDs referenced by PCDs of this module and of its dependent libraries."""
    Mapping = OrderedDict(self.Module.GetGuidsUsedByPcd())
    for Lib in self.DependentLibraryList:
        Mapping.update(Lib.GetGuidsUsedByPcd())
    return Mapping
1068 ## Get the protocol value mapping
1069 #
1070 # @retval dict The mapping between protocol cname and its value
1071 #
@cached_property
def ProtocolList(self):
    """Mapping of protocol cname -> value used by the module and its libraries.

    Side effect: merges protocol usage comments into self._ProtocolComments.
    """
    Protocols = OrderedDict(self.Module.Protocols)
    for Lib in self.DependentLibraryList:
        Protocols.update(Lib.Protocols)
        ExtendCopyDictionaryLists(self._ProtocolComments, Lib.ProtocolComments)
    ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)
    return Protocols
1080
1081 ## Get the PPI value mapping
1082 #
1083 # @retval dict The mapping between PPI cname and its value
1084 #
@cached_property
def PpiList(self):
    """Mapping of PPI cname -> value used by the module and its libraries.

    Side effect: merges PPI usage comments into self._PpiComments.
    """
    Ppis = OrderedDict(self.Module.Ppis)
    for Lib in self.DependentLibraryList:
        Ppis.update(Lib.Ppis)
        ExtendCopyDictionaryLists(self._PpiComments, Lib.PpiComments)
    ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)
    return Ppis
1093
1094 ## Get the list of include search path
1095 #
1096 # @retval list The list path
1097 #
@cached_property
def IncludePathList(self):
    """Ordered include search paths: module dir, debug dir, then each
    package's directory and public includes (no duplicates)."""
    Paths = [self.MetaFile.Dir, self.DebugDir]
    for Package in self.Module.Packages:
        PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
        if PackageDir not in Paths:
            Paths.append(PackageDir)
        Includes = Package.Includes
        # Private includes are only visible to modules located inside the package.
        if Package._PrivateIncludes and not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
            Includes = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
        for Inc in Includes:
            if Inc not in Paths:
                Paths.append(str(Inc))
    return Paths
1116
@cached_property
def IncludePathLength(self):
    """Total length of all include paths, each counted with one separator char."""
    Total = 0
    for IncPath in self.IncludePathList:
        Total += len(IncPath) + 1
    return Total
1120
1121 ## Get the list of include paths from the packages
1122 #
1123 # @IncludesList list The list path
1124 #
@cached_property
def PackageIncludePathList(self):
    """Include paths declared by ALL dependent packages.

    Private package includes are filtered out when this module does not
    live inside the declaring package.
    """
    RetVal = []
    for Package in self.Module.Packages:
        PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
        IncludesList = Package.Includes
        if Package._PrivateIncludes:
            if not self.MetaFile.Path.startswith(PackageDir):
                IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
        # Accumulate across packages: the previous code rebound the result
        # variable on every iteration and therefore returned only the LAST
        # package's includes, dropping all the others.
        RetVal.extend(IncludesList)
    return RetVal
1135
1136 ## Get HII EX PCDs which maybe used by VFR
1137 #
1138 # efivarstore used by VFR may relate with HII EX PCDs
1139 # Get the variable name and GUID from efivarstore and HII EX PCD
1140 # List the HII EX PCDs in As Built INF if both name and GUID match.
1141 #
1142 # @retval list HII EX PCDs
1143 #
def _GetPcdsMaybeUsedByVfr(self):
    """Return HII EX PCDs whose (variable name, GUID) pair matches an
    efivarstore statement found in any preprocessed VFR (.i) file.

    Returns an empty list when the module has no sources, no .i files,
    or no efivarstore statements.
    """
    if not self.SourceFileList:
        return []

    # Collect (name-byte-array, guid-string) pairs from every preprocessed VFR.
    NameGuids = set()
    for SrcFile in self.SourceFileList:
        if SrcFile.Ext.lower() != '.vfr':
            continue
        # The preprocessor output <BaseName>.i lives next to the build outputs.
        Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
        if not os.path.exists(Vfri):
            continue
        VfriFile = open(Vfri, 'r')
        Content = VfriFile.read()
        VfriFile.close()
        Pos = Content.find('efivarstore')
        while Pos != -1:
            #
            # Make sure 'efivarstore' is the start of efivarstore statement
            # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
            #
            Index = Pos - 1
            while Index >= 0 and Content[Index] in ' \t\r\n':
                Index -= 1
            if Index >= 0 and Content[Index] != ';':
                # Not a statement start; look for the next occurrence.
                Pos = Content.find('efivarstore', Pos + len('efivarstore'))
                continue
            #
            # 'efivarstore' must be followed by name and guid
            #
            Name = gEfiVarStoreNamePattern.search(Content, Pos)
            if not Name:
                break
            Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
            if not Guid:
                break
            # Compare names in the same byte-array form used for the PCDs below.
            NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')
            NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
            Pos = Content.find('efivarstore', Name.end())
    if not NameGuids:
        return []
    # Match collected pairs against every DynamicExHii PCD of the platform.
    HiiExPcds = []
    for Pcd in self.PlatformInfo.Pcds.values():
        if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
            continue
        for SkuInfo in Pcd.SkuInfoList.values():
            Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
            if not Value:
                continue
            Name = _ConvertStringToByteArray(SkuInfo.VariableName)
            Guid = GuidStructureStringToGuidString(Value)
            if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
                HiiExPcds.append(Pcd)
                break

    return HiiExPcds
1199
def _GenOffsetBin(self):
    """Generate <Name>Offset.bin recording where the VFR and UNI binary
    sections sit inside the final .efi image.

    Each record is a 16-byte GUID tag followed by a 64-bit little-endian
    offset.  Returns the generated file's name (relative to OutputDir),
    or None when the module has no VFR/UNI content or no offsets were
    found in the .map file.
    """
    VfrUniBaseName = {}
    for SourceFile in self.Module.Sources:
        if SourceFile.Type.upper() == ".VFR" :
            #
            # search the .map file to find the offset of vfr binary in the PE32+/TE file.
            #
            VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
        elif SourceFile.Type.upper() == ".UNI" :
            #
            # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
            #
            VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")

    if not VfrUniBaseName:
        return None
    MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
    EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
    VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
    if not VfrUniOffsetList:
        return None

    OutputName = '%sOffset.bin' % self.Name
    UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)

    try:
        # Unbuffered binary file so the data hits disk immediately.
        fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
    except:
        # EdkLogger.error is expected to abort the build here (presumably
        # raises — TODO confirm), so fInputfile is always bound below.
        EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)

    # Use a instance of BytesIO to cache data
    fStringIO = BytesIO()

    for Item in VfrUniOffsetList:
        if (Item[0].find("Strings") != -1):
            #
            # UNI offset in image.
            # GUID + Offset
            # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
            #
            # Byte-packed (little-endian fields) form of the GUID above.
            UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
            fStringIO.write(UniGuid)
            UniValue = pack ('Q', int (Item[1], 16))
            fStringIO.write (UniValue)
        else:
            #
            # VFR binary offset in image.
            # GUID + Offset
            # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
            #
            # Byte-packed (little-endian fields) form of the GUID above.
            VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
            fStringIO.write(VfrGuid)
            VfrValue = pack ('Q', int (Item[1], 16))
            fStringIO.write (VfrValue)
    #
    # write data into file.
    #
    try :
        fInputfile.write (fStringIO.getvalue())
    except:
        EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
                        "file been locked or using by other applications." %UniVfrOffsetFileName, None)

    fStringIO.close ()
    fInputfile.close ()
    return OutputName
1266
@cached_property
def OutputFile(self):
    """Names of the artifacts this module's build produces.

    Coda-target names are made relative by stripping the debug/output
    directory prefixes; .depex, the offset bin, .pdb files and FFS-related
    outputs are added on top.
    """
    Results = set()
    NormOutputDir = self.OutputDir.replace('\\', '/').strip('/')
    NormDebugDir = self.DebugDir.replace('\\', '/').strip('/')
    NormFfsDir = self.FfsOutputDir.replace('\\', '/').rstrip('/')

    for Item in self.CodaTargetList:
        # Strip both directory prefixes to get a relative file name.
        RelName = Item.Target.Path.replace('\\', '/').strip('/').replace(NormDebugDir, '').replace(NormOutputDir, '').strip('/')
        Results.add(RelName)

    if self.DepexGenerated:
        Results.add(self.Name + '.depex')

    OffsetBin = self._GenOffsetBin()
    if OffsetBin:
        Results.add(OffsetBin)

    for _, _, FileNames in os.walk(NormOutputDir):
        for FileName in FileNames:
            if FileName.lower().endswith('.pdb'):
                Results.add(FileName)

    for _, _, FileNames in os.walk(NormFfsDir):
        for FileName in FileNames:
            if FileName.lower().endswith(('.ffs', '.offset', '.raw', '.raw.txt')):
                Results.add(FileName)

    return Results
1295
1296 ## Create AsBuilt INF file the module
1297 #
def CreateAsBuiltInf(self):
    """Write the As-Built INF (<Name>.inf in OutputDir) describing what was
    actually built: binaries, patchable/dynamic-ex PCDs, packages, GUIDs,
    protocols, PPIs, flags and library classes.

    Skipped for libraries, source-less modules, and modules that already
    have binary files (TODO in original: mixed source+binary modules).
    Idempotent via self.IsAsBuiltInfCreated.
    """

    if self.IsAsBuiltInfCreated:
        return

    # Skip INF file generation for libraries
    if self.IsLibrary:
        return

    # Skip the following code for modules with no source files
    if not self.SourceFileList:
        return

    # Skip the following code for modules without any binary files
    if self.BinaryFileList:
        return

    ### TODO: How to handles mixed source and binary modules

    # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
    # Also find all packages that the DynamicEx PCDs depend on
    Pcds = []
    PatchablePcds = []
    Packages = []
    PcdCheckList = []
    PcdTokenSpaceList = []
    for Pcd in self.ModulePcdList + self.LibraryPcdList:
        if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
            PatchablePcds.append(Pcd)
            PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
        elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
            if Pcd not in Pcds:
                Pcds.append(Pcd)
                PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
                PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
                PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
    GuidList = OrderedDict(self.GuidList)
    for TokenSpace in self.GetGuidsUsedByPcd:
        # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
        # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
        if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
            GuidList.pop(TokenSpace)
    # Keep only packages that declare something this module actually uses.
    CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
    for Package in self.DerivedPackageList:
        if Package in Packages:
            continue
        BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
        Found = False
        for Index in range(len(BeChecked)):
            for Item in CheckList[Index]:
                if Item in BeChecked[Index]:
                    Packages.append(Package)
                    Found = True
                    break
            if Found:
                break

    # Also pull in packages that declare PCDs referenced by VFR efivarstores.
    VfrPcds = self._GetPcdsMaybeUsedByVfr()
    for Pkg in self.PlatformInfo.PackageList:
        if Pkg in Packages:
            continue
        for VfrPcd in VfrPcds:
            if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
                (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
                Packages.append(Pkg)
                break

    # A UEFI_DRIVER with a depex is reported as DXE_DRIVER in the as-built INF.
    ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
    DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
    Guid = self.Guid
    MDefs = self.Module.Defines

    # Substitution dictionary for the gAsBuiltInfHeaderString template.
    AsBuiltInfDict = {
        'module_name' : self.Name,
        'module_guid' : Guid,
        'module_module_type' : ModuleType,
        'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
        'pcd_is_driver_string' : [],
        'module_uefi_specification_version' : [],
        'module_pi_specification_version' : [],
        'module_entry_point' : self.Module.ModuleEntryPointList,
        'module_unload_image' : self.Module.ModuleUnloadImageList,
        'module_constructor' : self.Module.ConstructorList,
        'module_destructor' : self.Module.DestructorList,
        'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
        'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
        'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
        'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
        'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
        'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
        'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
        'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
        'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
        'module_arch' : self.Arch,
        'package_item' : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
        'binary_item' : [],
        'patchablepcd_item' : [],
        'pcd_item' : [],
        'protocol_item' : [],
        'ppi_item' : [],
        'guid_item' : [],
        'flags_item' : [],
        'libraryclasses_item' : []
    }

    if 'MODULE_UNI_FILE' in MDefs:
        UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
        if os.path.isfile(UNIFile):
            shutil.copy2(UNIFile, self.OutputDir)

    if self.AutoGenVersion > int(gInfSpecVersion, 0):
        AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
    else:
        AsBuiltInfDict['module_inf_version'] = gInfSpecVersion

    if DriverType:
        AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)

    if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
        AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
    if 'PI_SPECIFICATION_VERSION' in self.Specification:
        AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])

    # Emit one binary_item per final build target, classified by extension.
    OutputDir = self.OutputDir.replace('\\', '/').strip('/')
    DebugDir = self.DebugDir.replace('\\', '/').strip('/')
    for Item in self.CodaTargetList:
        File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
        if os.path.isabs(File):
            File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
        if Item.Target.Ext.lower() == '.aml':
            AsBuiltInfDict['binary_item'].append('ASL|' + File)
        elif Item.Target.Ext.lower() == '.acpi':
            AsBuiltInfDict['binary_item'].append('ACPI|' + File)
        elif Item.Target.Ext.lower() == '.efi':
            AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
        else:
            AsBuiltInfDict['binary_item'].append('BIN|' + File)
    # A depex file on disk counts even if this run did not regenerate it.
    if not self.DepexGenerated:
        DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
        if os.path.exists(DepexFile):
            self.DepexGenerated = True
    if self.DepexGenerated:
        if self.ModuleType in [SUP_MODULE_PEIM]:
            AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
        elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
            AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
        elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
            AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')

    Bin = self._GenOffsetBin()
    if Bin:
        AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)

    for Root, Dirs, Files in os.walk(OutputDir):
        for File in Files:
            if File.lower().endswith('.pdb'):
                AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
    # Start the header at @BinaryHeader if present, rewritten as @file.
    HeaderComments = self.Module.HeaderComments
    StartPos = 0
    for Index in range(len(HeaderComments)):
        if HeaderComments[Index].find('@BinaryHeader') != -1:
            HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
            StartPos = Index
            break
    AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
    AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)

    # Protocols, PPIs and GUIDs: entry text is the cname preceded by its comments.
    GenList = [
        (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
        (self.PpiList, self._PpiComments, 'ppi_item'),
        (GuidList, self._GuidComments, 'guid_item')
    ]
    for Item in GenList:
        for CName in Item[0]:
            Comments = '\n '.join(Item[1][CName]) if CName in Item[1] else ''
            Entry = Comments + '\n ' + CName if Comments else CName
            AsBuiltInfDict[Item[2]].append(Entry)
    # Patchable PCDs: resolve each one's offset from the .map/.efi pair.
    PatchList = parsePcdInfoFromMapFile(
                        os.path.join(self.OutputDir, self.Name + '.map'),
                        os.path.join(self.OutputDir, self.Name + '.efi')
                    )
    if PatchList:
        for Pcd in PatchablePcds:
            TokenCName = Pcd.TokenCName
            for PcdItem in GlobalData.MixedPcd:
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                    TokenCName = PcdItem[0]
                    break
            for PatchPcd in PatchList:
                if TokenCName == PatchPcd[0]:
                    break
            else:
                # No map-file entry for this PCD; skip it.
                continue
            PcdValue = ''
            if Pcd.DatumType == 'BOOLEAN':
                BoolValue = Pcd.DefaultValue.upper()
                if BoolValue == 'TRUE':
                    Pcd.DefaultValue = '1'
                elif BoolValue == 'FALSE':
                    Pcd.DefaultValue = '0'

            if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
                # Numeric PCDs: hex-format to the datum's natural width.
                HexFormat = '0x%02x'
                if Pcd.DatumType == TAB_UINT16:
                    HexFormat = '0x%04x'
                elif Pcd.DatumType == TAB_UINT32:
                    HexFormat = '0x%08x'
                elif Pcd.DatumType == TAB_UINT64:
                    HexFormat = '0x%016x'
                PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
            else:
                # VOID* PCDs: render as a byte array padded to MaxDatumSize.
                if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
                    EdkLogger.error("build", AUTOGEN_ERROR,
                                    "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
                                    )
                ArraySize = int(Pcd.MaxDatumSize, 0)
                PcdValue = Pcd.DefaultValue
                if PcdValue[0] != '{':
                    # String default (ASCII or L"..."), converted to bytes;
                    # eval() unquotes the INF-supplied literal (trusted build input).
                    Unicode = False
                    if PcdValue[0] == 'L':
                        Unicode = True
                    PcdValue = PcdValue.lstrip('L')
                    PcdValue = eval(PcdValue)
                    NewValue = '{'
                    for Index in range(0, len(PcdValue)):
                        if Unicode:
                            CharVal = ord(PcdValue[Index])
                            NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
                                    + '0x%02x' % (CharVal >> 8) + ', '
                        else:
                            NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
                    Padding = '0x00, '
                    if Unicode:
                        Padding = Padding * 2
                        ArraySize = ArraySize // 2
                    if ArraySize < (len(PcdValue) + 1):
                        if Pcd.MaxSizeUserSet:
                            EdkLogger.error("build", AUTOGEN_ERROR,
                                            "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                            )
                        else:
                            ArraySize = len(PcdValue) + 1
                    if ArraySize > len(PcdValue) + 1:
                        NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
                    PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
                elif len(PcdValue.split(',')) <= ArraySize:
                    # Already a byte-array literal: pad with zero bytes.
                    PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
                    PcdValue += '}'
                else:
                    if Pcd.MaxSizeUserSet:
                        EdkLogger.error("build", AUTOGEN_ERROR,
                                        "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                        )
                    else:
                        ArraySize = len(PcdValue) + 1
            PcdItem = '%s.%s|%s|0x%X' % \
                (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
            PcdComments = ''
            if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
            if PcdComments:
                PcdItem = PcdComments + '\n ' + PcdItem
            AsBuiltInfDict['patchablepcd_item'].append(PcdItem)

    # DynamicEx and VFR-referenced PCDs, annotated with HII variable info.
    for Pcd in Pcds + VfrPcds:
        PcdCommentList = []
        HiiInfo = ''
        TokenCName = Pcd.TokenCName
        for PcdItem in GlobalData.MixedPcd:
            if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                TokenCName = PcdItem[0]
                break
        if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
            # Only the first SKU's variable info is reported.
            for SkuName in Pcd.SkuInfoList:
                SkuInfo = Pcd.SkuInfoList[SkuName]
                HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
                break
        if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
            PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
        if HiiInfo:
            # Fold the HII info into an existing usage comment if one exists.
            UsageIndex = -1
            UsageStr = ''
            for Index, Comment in enumerate(PcdCommentList):
                for Usage in UsageList:
                    if Comment.find(Usage) != -1:
                        UsageStr = Usage
                        UsageIndex = Index
                        break
            if UsageIndex != -1:
                PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
            else:
                PcdCommentList.append('## UNDEFINED ' + HiiInfo)
        PcdComments = '\n '.join(PcdCommentList)
        PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
        if PcdComments:
            PcdEntry = PcdComments + '\n ' + PcdEntry
        AsBuiltInfDict['pcd_item'].append(PcdEntry)
    for Item in self.BuildOption:
        if 'FLAGS' in self.BuildOption[Item]:
            AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))

    # Generated LibraryClasses section in comments.
    for Library in self.LibraryAutoGenList:
        AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))

    # Generated UserExtensions TianoCore section.
    # All tianocore user extensions are copied.
    UserExtStr = ''
    for TianoCore in self._GetTianoCoreUserExtensionList():
        UserExtStr += '\n'.join(TianoCore)
        ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
        if os.path.isfile(ExtensionFile):
            shutil.copy2(ExtensionFile, self.OutputDir)
    AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr

    # Generated depex expression section in comments.
    DepexExpression = self._GetDepexExpresionString()
    AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''

    AsBuiltInf = TemplateString()
    AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))

    SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)

    self.IsAsBuiltInfCreated = True
1623
def CacheCopyFile(self, OriginDir, CopyDir, File):
    """Copy File (located under CopyDir) to the same relative location under OriginDir.

    Best effort: a failed copy only emits a cache warning so a broken
    binary cache can never fail the build itself.
    """
    sub_dir = os.path.relpath(File, CopyDir)
    destination_file = os.path.join(OriginDir, sub_dir)
    destination_dir = os.path.dirname(destination_file)
    CreateDirectory(destination_dir)
    try:
        CopyFileOnChange(File, destination_dir)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate; any actual copy error stays non-fatal.
        EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))
        return
1634
def CopyModuleToCache(self):
    """Publish this module's build outputs into the binary cache.

    Layout: <gBinCacheDest>/<platform out>/<target_toolchain>/<arch>/
    <source dir>/<module>/<MakeHash>/ plus a parallel FV "Ffs" directory
    for FFS-related artifacts.  Returns False when the required hashes
    cannot be generated; falls off the end (None) on success.
    """
    self.GenPreMakefileHash(GlobalData.gCacheIR)
    if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \
       not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
        EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
        return False

    self.GenMakeHash(GlobalData.gCacheIR)
    if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \
       not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashChain or \
       not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
        EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
        return False

    # The MakeHash digest names the per-build cache subdirectory.
    MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)
    FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName, MakeHashStr)
    FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name, MakeHashStr)

    CreateDirectory (FileDir)
    self.SaveHashChainFileToCache(GlobalData.gCacheIR)
    ModuleFile = path.join(self.OutputDir, self.Name + '.inf')
    if os.path.exists(ModuleFile):
        CopyFileOnChange(ModuleFile, FileDir)
    # Fall back to the build database's binary list when OutputFile is empty.
    if not self.OutputFile:
        Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
        self.OutputFile = Ma.Binaries
    for File in self.OutputFile:
        File = str(File)
        # Relative names may live in OutputDir or, failing that, FfsOutputDir.
        if not os.path.isabs(File):
            NewFile = os.path.join(self.OutputDir, File)
            if not os.path.exists(NewFile):
                NewFile = os.path.join(self.FfsOutputDir, File)
            File = NewFile
        if os.path.exists(File):
            # FFS-related artifacts go to the FV cache tree, the rest to the module tree.
            if File.lower().endswith('.ffs') or File.lower().endswith('.offset') or File.lower().endswith('.raw') \
                or File.lower().endswith('.raw.txt'):
                self.CacheCopyFile(FfsDir, self.FfsOutputDir, File)
            else:
                self.CacheCopyFile(FileDir, self.OutputDir, File)
1674
def SaveHashChainFileToCache(self, gDict):
    """Persist this module's hash bookkeeping into the binary cache.

    Writes <Name>.ModuleHashPair (accumulated, de-duplicated list of
    (PreMakefileHash, MakeHash) pairs), plus per-MakeHash
    <Name>.MakeHashChain and <Name>.ModuleFilesChain JSON files, and a
    CacheDebug copy of the makefile and autogen files.  Returns True on
    success, False on any failure (all failures are non-fatal warnings).
    """
    if not GlobalData.gBinCacheDest:
        return False

    self.GenPreMakefileHash(gDict)
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
        EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
        return False

    self.GenMakeHash(gDict)
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain or \
       not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
        EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
        return False

    # save the hash chain list as cache file
    # NOTE(review): reads GlobalData.gCacheIR rather than the gDict
    # parameter; equivalent today because callers pass gCacheIR, but
    # inconsistent with the rest of this function.
    MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)
    CacheDestDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    CacheHashDestDir = path.join(CacheDestDir, MakeHashStr)
    ModuleHashPair = path.join(CacheDestDir, self.Name + ".ModuleHashPair")
    MakeHashChain = path.join(CacheHashDestDir, self.Name + ".MakeHashChain")
    ModuleFilesChain = path.join(CacheHashDestDir, self.Name + ".ModuleFilesChain")

    # save the HashChainDict as json file
    CreateDirectory (CacheDestDir)
    CreateDirectory (CacheHashDestDir)
    try:
        # Merge with any pairs already in the cache and de-duplicate.
        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
        if os.path.exists(ModuleHashPair):
            with open(ModuleHashPair, 'r') as f:
                ModuleHashPairList = json.load(f)
        PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
        MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
        ModuleHashPairList.append((PreMakeHash, MakeHash))
        ModuleHashPairList = list(set(map(tuple, ModuleHashPairList)))
        with open(ModuleHashPair, 'w') as f:
            json.dump(ModuleHashPairList, f, indent=2)
    except:
        EdkLogger.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair)
        return False

    try:
        with open(MakeHashChain, 'w') as f:
            json.dump(gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain, f, indent=2)
    except:
        EdkLogger.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain)
        return False

    try:
        with open(ModuleFilesChain, 'w') as f:
            json.dump(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain, f, indent=2)
    except:
        EdkLogger.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain)
        return False

    # save the autogenfile and makefile for debug usage
    CacheDebugDir = path.join(CacheHashDestDir, "CacheDebug")
    CreateDirectory (CacheDebugDir)
    CopyFileOnChange(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath, CacheDebugDir)
    if gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:
        for File in gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:
            CopyFileOnChange(str(File), CacheDebugDir)

    return True
1741
    ## Create makefile for the module and its dependent libraries
    #
    #   @param      CreateLibraryMakeFile   Flag indicating if or not the makefiles of
    #                                       dependent libraries will be created
    #   @param      GenFfsList              List of FFS generation commands to embed
    #                                       in the generated makefile
    #
    @cached_class_function
    def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):
        # Early exit when the shared build-cache IR already records a
        # completed makefile for this (module path, arch) pair.
        gDict = GlobalData.gCacheIR
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
            return

        # nest this function inside its only caller.
        def CreateTimeStamp():
            # Collect every file whose modification time decides whether this
            # module's AutoGen can be skipped on the next incremental build:
            # the module meta file, all sources, dependent library meta files
            # and the AutoGen dependency set.
            FileSet = {self.MetaFile.Path}

            for SourceFile in self.Module.Sources:
                FileSet.add (SourceFile.Path)

            for Lib in self.DependentLibraryList:
                FileSet.add (Lib.MetaFile.Path)

            for f in self.AutoGenDepSet:
                FileSet.add (f.Path)

            # Remove any stale timestamp file before writing the fresh list.
            if os.path.exists (self.TimeStampPath):
                os.remove (self.TimeStampPath)

            SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)

        # Ignore generating makefile when it is a binary module
        if self.IsBinaryModule:
            return

        self.GenFfsList = GenFfsList

        # Create makefiles for all dependent libraries first.
        if not self.IsLibrary and CreateLibraryMakeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateMakeFile()

        # CanSkip uses timestamps to determine build skipping
        if self.CanSkip():
            return

        if len(self.CustomMakefile) == 0:
            Makefile = GenMake.ModuleMakefile(self)
        else:
            Makefile = GenMake.CustomMakefile(self)
        if Makefile.Generate():
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
                            (self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
                            (self.Name, self.Arch))

        CreateTimeStamp()

        MakefileType = Makefile._FileType
        MakefileName = Makefile._FILE_NAME_[MakefileType]
        MakefilePath = os.path.join(self.MakeFileDir, MakefileName)

        # Publish the makefile path and header-dependency set into the shared
        # cache IR so later hashing/cache steps can consume them.
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.MakefilePath = MakefilePath
        MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet
        MewIR.CreateMakeFileDone = True
        with GlobalData.cache_lock:
            try:
                # Update the existing IR entry in place when one exists.
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.MakefilePath = MakefilePath
                IR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet
                IR.CreateMakeFileDone = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                # No entry yet (lookup raised): insert the fresh IR object.
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR
1816
1817 def CopyBinaryFiles(self):
1818 for File in self.Module.Binaries:
1819 SrcPath = File.Path
1820 DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))
1821 CopyLongFilePath(SrcPath, DstPath)
    ## Create autogen code for the module and its dependent libraries
    #
    #   @param      CreateLibraryCodeFile   Flag indicating if or not the code of
    #                                       dependent libraries will be created
    #
    #   @retval     list    Names of the AutoGen files actually generated, or
    #                       None when generation was skipped
    #
    def CreateCodeFile(self, CreateLibraryCodeFile=True):
        # Early exit when the shared build-cache IR already records finished
        # code generation for this (module path, arch) pair.
        gDict = GlobalData.gCacheIR
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:
            return

        if self.IsCodeFileCreated:
            return

        # Need to generate PcdDatabase even PcdDriver is binarymodule
        if self.IsBinaryModule and self.PcdIsDriver != '':
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
            return
        # A binary library only needs its binaries copied; no code to create.
        if self.IsBinaryModule:
            if self.IsLibrary:
                self.CopyBinaryFiles()
            return

        # Generate code for all dependent libraries first.
        if not self.IsLibrary and CreateLibraryCodeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateCodeFile()

        # CanSkip uses timestamps to determine build skipping
        if self.CanSkip():
            return

        # Track which AutoGen files were (re)generated vs. left untouched.
        AutoGenList = []
        IgoredAutoGenList = []

        for File in self.AutoGenFileList:
            if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
                AutoGenList.append(str(File))
            else:
                IgoredAutoGenList.append(str(File))


        for ModuleType in self.DepexList:
            # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
            if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
                continue

            Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
            DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}

            # A non-empty postfix notation means a real depex file is emitted.
            if len(Dpx.PostfixNotation) != 0:
                self.DepexGenerated = True

            if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
                AutoGenList.append(str(DpxFile))
            else:
                IgoredAutoGenList.append(str(DpxFile))

        if IgoredAutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
                            (" ".join(AutoGenList), self.Name, self.Arch))
        elif AutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
                            (" ".join(IgoredAutoGenList), self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
                            (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))

        # Record completion both on the instance and in the shared cache IR.
        self.IsCodeFileCreated = True
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.CreateCodeFileDone = True
        with GlobalData.cache_lock:
            try:
                # Update the existing IR entry in place when one exists.
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.CreateCodeFileDone = True
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                # No entry yet (lookup raised): insert the fresh IR object.
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR

        return AutoGenList
1901
1902 ## Summarize the ModuleAutoGen objects of all libraries used by this module
1903 @cached_property
1904 def LibraryAutoGenList(self):
1905 RetVal = []
1906 for Library in self.DependentLibraryList:
1907 La = ModuleAutoGen(
1908 self.Workspace,
1909 Library.MetaFile,
1910 self.BuildTarget,
1911 self.ToolChain,
1912 self.Arch,
1913 self.PlatformInfo.MetaFile,
1914 self.DataPipe
1915 )
1916 La.IsLibrary = True
1917 if La not in RetVal:
1918 RetVal.append(La)
1919 for Lib in La.CodaTargetList:
1920 self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
1921 return RetVal
1922
1923 def GenModuleHash(self):
1924 # Initialize a dictionary for each arch type
1925 if self.Arch not in GlobalData.gModuleHash:
1926 GlobalData.gModuleHash[self.Arch] = {}
1927
1928 # Early exit if module or library has been hashed and is in memory
1929 if self.Name in GlobalData.gModuleHash[self.Arch]:
1930 return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
1931
1932 # Initialze hash object
1933 m = hashlib.md5()
1934
1935 # Add Platform level hash
1936 m.update(GlobalData.gPlatformHash.encode('utf-8'))
1937
1938 # Add Package level hash
1939 if self.DependentPackageList:
1940 for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
1941 if Pkg.PackageName in GlobalData.gPackageHash:
1942 m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))
1943
1944 # Add Library hash
1945 if self.LibraryAutoGenList:
1946 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
1947 if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
1948 Lib.GenModuleHash()
1949 m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
1950
1951 # Add Module self
1952 with open(str(self.MetaFile), 'rb') as f:
1953 Content = f.read()
1954 m.update(Content)
1955
1956 # Add Module's source files
1957 if self.SourceFileList:
1958 for File in sorted(self.SourceFileList, key=lambda x: str(x)):
1959 f = open(str(File), 'rb')
1960 Content = f.read()
1961 f.close()
1962 m.update(Content)
1963
1964 GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()
1965
1966 return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
1967
    ## Hash every file that defines this module (meta file, sources and the
    #  include headers discovered per source) and record the md5 digest, hex
    #  digest and per-file (path, hash) chain in the shared cache IR dict
    #  *gDict*, keyed by (module path, arch).
    #
    #   @param      gDict   Shared build-cache IR dictionary
    #   @retval     ModuleBuildCacheIR entry, or None when skipped
    #
    def GenModuleFilesHash(self, gDict):
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict:
            if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:
                return gDict[(self.MetaFile.Path, self.Arch)]

        # skip if the module cache already crashed
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return

        DependencyFileSet = set()
        # Add Module Meta file
        DependencyFileSet.add(self.MetaFile)

        # Add Module's source files
        if self.SourceFileList:
            for File in set(self.SourceFileList):
                DependencyFileSet.add(File)

        # Add modules's include header files
        # Search dependency file list for each source file
        SourceFileList = []
        OutPutFileList = []
        for Target in self.IntroTargetList:
            SourceFileList.extend(Target.Inputs)
            OutPutFileList.extend(Target.Outputs)
        # Drop generated outputs from the source list; only true inputs count.
        if OutPutFileList:
            for Item in OutPutFileList:
                if Item in SourceFileList:
                    SourceFileList.remove(Item)
        SearchList = []
        for file_path in self.IncludePathList + self.BuildOptionIncPathList:
            # skip the folders in platform BuildDir which are not been generated yet
            if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):
                continue
            SearchList.append(file_path)
        FileDependencyDict = {}
        ForceIncludedFile = []
        for F in SourceFileList:
            # skip the files which are not been generated yet, because
            # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c
            if not os.path.exists(F.Path):
                continue
            FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)

        if FileDependencyDict:
            for Dependency in FileDependencyDict.values():
                DependencyFileSet.update(set(Dependency))

        # Caculate all above dependency files hash
        # Initialze hash object
        FileList = []
        m = hashlib.md5()
        for File in sorted(DependencyFileSet, key=lambda x: str(x)):
            if not os.path.exists(str(File)):
                EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
                continue
            with open(str(File), 'rb') as f:
                Content = f.read()
            m.update(Content)
            # Keep a per-file hash so cache misses can be traced to one file.
            FileList.append((str(File), hashlib.md5(Content).hexdigest()))


        # Publish digest and chain into the shared cache IR under the lock.
        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
        MewIR.ModuleFilesHashDigest = m.digest()
        MewIR.ModuleFilesHashHexDigest = m.hexdigest()
        MewIR.ModuleFilesChain = FileList
        with GlobalData.cache_lock:
            try:
                # Update the existing IR entry in place when one exists.
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.ModuleFilesHashDigest = m.digest()
                IR.ModuleFilesHashHexDigest = m.hexdigest()
                IR.ModuleFilesChain = FileList
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            except:
                # No entry yet (lookup raised): insert the fresh IR object.
                gDict[(self.MetaFile.Path, self.Arch)] = MewIR

        return gDict[(self.MetaFile.Path, self.Arch)]
2047
    ## Compute the pre-makefile hash for this module: platform hash, package
    #  hashes, dependent-library file hashes and this module's own file hash,
    #  folded into one md5; the hex digest is stored in the shared cache IR.
    #
    #   @param      gDict   Shared build-cache IR dictionary
    #   @retval     ModuleBuildCacheIR entry, or None when skipped
    #
    def GenPreMakefileHash(self, gDict):
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
            return gDict[(self.MetaFile.Path, self.Arch)]

        # skip if the module cache already crashed
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return

        # skip binary module
        if self.IsBinaryModule:
            return

        # Ensure this module's own file hash exists before combining.
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            self.GenModuleFilesHash(gDict)

        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return

        # Initialze hash object
        m = hashlib.md5()

        # Add Platform level hash
        if ('PlatformHash') in gDict:
            m.update(gDict[('PlatformHash')].encode('utf-8'))
        else:
            EdkLogger.quiet("[cache warning]: PlatformHash is missing")

        # Add Package level hash
        if self.DependentPackageList:
            for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
                if (Pkg.PackageName, 'PackageHash') in gDict:
                    m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))
                else:
                    EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg.PackageName, self.MetaFile.Name, self.Arch))

        # Add Library hash (recurse so each dependent library is hashed first)
        if self.LibraryAutoGenList:
            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
                   not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:
                    Lib.GenPreMakefileHash(gDict)
                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)

        # Add Module self
        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)

        # Publish the digest into the shared cache IR under the lock.
        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.PreMakefileHashHexDigest = m.hexdigest()
            gDict[(self.MetaFile.Path, self.Arch)] = IR

        return gDict[(self.MetaFile.Path, self.Arch)]
2106
2107 def GenMakeHeaderFilesHash(self, gDict):
2108 # Early exit if module or library has been hashed and is in memory
2109 if (self.MetaFile.Path, self.Arch) in gDict and \
2110 gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
2111 return gDict[(self.MetaFile.Path, self.Arch)]
2112
2113 # skip if the module cache already crashed
2114 if (self.MetaFile.Path, self.Arch) in gDict and \
2115 gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
2116 return
2117
2118 # skip binary module
2119 if self.IsBinaryModule:
2120 return
2121
2122 if not (self.MetaFile.Path, self.Arch) in gDict or \
2123 not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:
2124 if self.IsLibrary:
2125 if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.libConstPcd:
2126 self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]
2127 if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.Refes:
2128 self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]
2129 self.CreateCodeFile()
2130 if not (self.MetaFile.Path, self.Arch) in gDict or \
2131 not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
2132 self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.File, self.Arch),[]))
2133
2134 if not (self.MetaFile.Path, self.Arch) in gDict or \
2135 not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \
2136 not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
2137 EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2138 return
2139
2140 DependencyFileSet = set()
2141 # Add Makefile
2142 if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:
2143 DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)
2144 else:
2145 EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2146
2147 # Add header files
2148 if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:
2149 for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:
2150 DependencyFileSet.add(File)
2151 else:
2152 EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2153
2154 # Add AutoGen files
2155 if self.AutoGenFileList:
2156 for File in set(self.AutoGenFileList):
2157 DependencyFileSet.add(File)
2158
2159 # Caculate all above dependency files hash
2160 # Initialze hash object
2161 FileList = []
2162 m = hashlib.md5()
2163 for File in sorted(DependencyFileSet, key=lambda x: str(x)):
2164 if not os.path.exists(str(File)):
2165 EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
2166 continue
2167 f = open(str(File), 'rb')
2168 Content = f.read()
2169 f.close()
2170 m.update(Content)
2171 FileList.append((str(File), hashlib.md5(Content).hexdigest()))
2172
2173 with GlobalData.cache_lock:
2174 IR = gDict[(self.MetaFile.Path, self.Arch)]
2175 IR.AutoGenFileList = self.AutoGenFileList.keys()
2176 IR.MakeHeaderFilesHashChain = FileList
2177 IR.MakeHeaderFilesHashDigest = m.digest()
2178 gDict[(self.MetaFile.Path, self.Arch)] = IR
2179
2180 return gDict[(self.MetaFile.Path, self.Arch)]
2181
    ## Compute the make-phase hash for this module: header/makefile hashes,
    #  dependent-library make hashes and this module's own file hash, folded
    #  into one md5 plus a de-duplicated, sorted (path, hash) chain; results
    #  are stored in the shared cache IR dict *gDict*.
    #
    #   @param      gDict   Shared build-cache IR dictionary
    #   @retval     ModuleBuildCacheIR entry, or None when skipped
    #
    def GenMakeHash(self, gDict):
        # Early exit if module or library has been hashed and is in memory
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
            return gDict[(self.MetaFile.Path, self.Arch)]

        # skip if the module cache already crashed
        if (self.MetaFile.Path, self.Arch) in gDict and \
          gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
            return

        # skip binary module
        if self.IsBinaryModule:
            return

        # Ensure the two prerequisite hashes exist before combining them.
        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
            self.GenModuleFilesHash(gDict)
        if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
            self.GenMakeHeaderFilesHash(gDict)

        if not (self.MetaFile.Path, self.Arch) in gDict or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \
           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \
           not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:
            EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self.MetaFile.Path, self.Arch))
            return

        # Initialze hash object
        m = hashlib.md5()
        MakeHashChain = []

        # Add hash of makefile and dependency header files
        m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)
        # Append only entries not already in the chain, in sorted order.
        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))
        New.sort(key=lambda x: str(x))
        MakeHashChain += New

        # Add Library hash (recurse so each dependent library is hashed first)
        if self.LibraryAutoGenList:
            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
                   not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:
                    Lib.GenMakeHash(gDict)
                if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:
                    print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)
                    continue
                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)
                New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))
                New.sort(key=lambda x: str(x))
                MakeHashChain += New

        # Add Module self
        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)
        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))
        New.sort(key=lambda x: str(x))
        MakeHashChain += New

        # Publish digest and chain into the shared cache IR under the lock.
        with GlobalData.cache_lock:
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.MakeHashDigest = m.digest()
            IR.MakeHashHexDigest = m.hexdigest()
            IR.MakeHashChain = MakeHashChain
            gDict[(self.MetaFile.Path, self.Arch)] = IR

        return gDict[(self.MetaFile.Path, self.Arch)]
2249
2250 ## Decide whether we can skip the left autogen and make process
2251 def CanSkipbyPreMakefileCache(self, gDict):
2252 if not GlobalData.gBinCacheSource:
2253 return False
2254
2255 if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:
2256 return True
2257
2258 if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
2259 return False
2260
2261 # If Module is binary, do not skip by cache
2262 if self.IsBinaryModule:
2263 return False
2264
2265 # .inc is contains binary information so do not skip by hash as well
2266 for f_ext in self.SourceFileList:
2267 if '.inc' in str(f_ext):
2268 return False
2269
2270 # Get the module hash values from stored cache and currrent build
2271 # then check whether cache hit based on the hash values
2272 # if cache hit, restore all the files from cache
2273 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2274 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
2275
2276 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2277 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
2278 if not os.path.exists(ModuleHashPair):
2279 EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
2280 with GlobalData.cache_lock:
2281 IR = gDict[(self.MetaFile.Path, self.Arch)]
2282 IR.CacheCrash = True
2283 gDict[(self.MetaFile.Path, self.Arch)] = IR
2284 return False
2285
2286 try:
2287 with open(ModuleHashPair, 'r') as f:
2288 ModuleHashPairList = json.load(f)
2289 except:
2290 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
2291 return False
2292
2293 self.GenPreMakefileHash(gDict)
2294 if not (self.MetaFile.Path, self.Arch) in gDict or \
2295 not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
2296 EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2297 return False
2298
2299 MakeHashStr = None
2300 CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
2301 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2302 if PreMakefileHash == CurrentPreMakeHash:
2303 MakeHashStr = str(MakeHash)
2304
2305 if not MakeHashStr:
2306 return False
2307
2308 TargetHashDir = path.join(FileDir, MakeHashStr)
2309 TargetFfsHashDir = path.join(FfsDir, MakeHashStr)
2310
2311 if not os.path.exists(TargetHashDir):
2312 EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
2313 return False
2314
2315 for root, dir, files in os.walk(TargetHashDir):
2316 for f in files:
2317 File = path.join(root, f)
2318 self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
2319 if os.path.exists(TargetFfsHashDir):
2320 for root, dir, files in os.walk(TargetFfsHashDir):
2321 for f in files:
2322 File = path.join(root, f)
2323 self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)
2324
2325 if self.Name == "PcdPeim" or self.Name == "PcdDxe":
2326 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
2327
2328 with GlobalData.cache_lock:
2329 IR = gDict[(self.MetaFile.Path, self.Arch)]
2330 IR.PreMakeCacheHit = True
2331 gDict[(self.MetaFile.Path, self.Arch)] = IR
2332 print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)
2333 #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))
2334 return True
2335
2336 ## Decide whether we can skip the make process
2337 def CanSkipbyMakeCache(self, gDict):
2338 if not GlobalData.gBinCacheSource:
2339 return False
2340
2341 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
2342 return True
2343
2344 if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
2345 return False
2346
2347 # If Module is binary, do not skip by cache
2348 if self.IsBinaryModule:
2349 print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)
2350 return False
2351
2352 # .inc is contains binary information so do not skip by hash as well
2353 for f_ext in self.SourceFileList:
2354 if '.inc' in str(f_ext):
2355 with GlobalData.cache_lock:
2356 IR = gDict[(self.MetaFile.Path, self.Arch)]
2357 IR.MakeCacheHit = False
2358 gDict[(self.MetaFile.Path, self.Arch)] = IR
2359 print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)
2360 return False
2361
2362 # Get the module hash values from stored cache and currrent build
2363 # then check whether cache hit based on the hash values
2364 # if cache hit, restore all the files from cache
2365 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2366 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
2367
2368 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2369 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
2370 if not os.path.exists(ModuleHashPair):
2371 EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
2372 with GlobalData.cache_lock:
2373 IR = gDict[(self.MetaFile.Path, self.Arch)]
2374 IR.CacheCrash = True
2375 gDict[(self.MetaFile.Path, self.Arch)] = IR
2376 return False
2377
2378 try:
2379 with open(ModuleHashPair, 'r') as f:
2380 ModuleHashPairList = json.load(f)
2381 except:
2382 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
2383 return False
2384
2385 self.GenMakeHash(gDict)
2386 if not (self.MetaFile.Path, self.Arch) in gDict or \
2387 not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
2388 EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
2389 return False
2390
2391 MakeHashStr = None
2392 CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
2393 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2394 if MakeHash == CurrentMakeHash:
2395 MakeHashStr = str(MakeHash)
2396
2397 if not MakeHashStr:
2398 print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
2399 return False
2400
2401 TargetHashDir = path.join(FileDir, MakeHashStr)
2402 TargetFfsHashDir = path.join(FfsDir, MakeHashStr)
2403 if not os.path.exists(TargetHashDir):
2404 EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
2405 return False
2406
2407 for root, dir, files in os.walk(TargetHashDir):
2408 for f in files:
2409 File = path.join(root, f)
2410 self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
2411
2412 if os.path.exists(TargetFfsHashDir):
2413 for root, dir, files in os.walk(TargetFfsHashDir):
2414 for f in files:
2415 File = path.join(root, f)
2416 self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)
2417
2418 if self.Name == "PcdPeim" or self.Name == "PcdDxe":
2419 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
2420 with GlobalData.cache_lock:
2421 IR = gDict[(self.MetaFile.Path, self.Arch)]
2422 IR.MakeCacheHit = True
2423 gDict[(self.MetaFile.Path, self.Arch)] = IR
2424 print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
2425 return True
2426
2427 ## Show the first file name which causes cache miss
2428 def PrintFirstMakeCacheMissFile(self, gDict):
2429 if not GlobalData.gBinCacheSource:
2430 return
2431
2432 # skip if the module cache already crashed
2433 if gDict[(self.MetaFile.Path, self.Arch)].CacheCrash:
2434 return
2435
2436 # skip binary module
2437 if self.IsBinaryModule:
2438 return
2439
2440 if not (self.MetaFile.Path, self.Arch) in gDict:
2441 return
2442
2443 # Only print cache miss file for the MakeCache not hit module
2444 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
2445 return
2446
2447 if not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
2448 EdkLogger.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self.MetaFile.Path, self.Arch))
2449 return
2450
2451 # Find the cache dir name through the .ModuleHashPair file info
2452 FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2453
2454 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2455 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
2456 if not os.path.exists(ModuleHashPair):
2457 EdkLogger.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2458 return
2459
2460 try:
2461 with open(ModuleHashPair, 'r') as f:
2462 ModuleHashPairList = json.load(f)
2463 except:
2464 EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2465 return
2466
2467 MakeHashSet = set()
2468 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2469 TargetHashDir = path.join(FileDir, str(MakeHash))
2470 if os.path.exists(TargetHashDir):
2471 MakeHashSet.add(MakeHash)
2472 if not MakeHashSet:
2473 EdkLogger.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2474 return
2475
2476 TargetHash = list(MakeHashSet)[0]
2477 TargetHashDir = path.join(FileDir, str(TargetHash))
2478 if len(MakeHashSet) > 1 :
2479 EdkLogger.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash, self.MetaFile.Path, self.Arch))
2480
2481 ListFile = path.join(TargetHashDir, self.Name + '.MakeHashChain')
2482 if os.path.exists(ListFile):
2483 try:
2484 f = open(ListFile, 'r')
2485 CachedList = json.load(f)
2486 f.close()
2487 except:
2488 EdkLogger.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile)
2489 return
2490 else:
2491 EdkLogger.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile)
2492 return
2493
2494 CurrentList = gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain
2495 for idx, (file, hash) in enumerate (CurrentList):
2496 (filecached, hashcached) = CachedList[idx]
2497 if file != filecached:
2498 EdkLogger.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self.MetaFile.Path, self.Arch, file, filecached))
2499 break
2500 if hash != hashcached:
2501 EdkLogger.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self.MetaFile.Path, self.Arch, file))
2502 break
2503
2504 return True
2505
2506 ## Decide whether we can skip the ModuleAutoGen process
2507 def CanSkipbyCache(self, gDict):
2508 # Hashing feature is off
2509 if not GlobalData.gBinCacheSource:
2510 return False
2511
2512 if self in GlobalData.gBuildHashSkipTracking:
2513 return GlobalData.gBuildHashSkipTracking[self]
2514
2515 # If library or Module is binary do not skip by hash
2516 if self.IsBinaryModule:
2517 GlobalData.gBuildHashSkipTracking[self] = False
2518 return False
2519
2520 # .inc is contains binary information so do not skip by hash as well
2521 for f_ext in self.SourceFileList:
2522 if '.inc' in str(f_ext):
2523 GlobalData.gBuildHashSkipTracking[self] = False
2524 return False
2525
2526 if not (self.MetaFile.Path, self.Arch) in gDict:
2527 return False
2528
2529 if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:
2530 GlobalData.gBuildHashSkipTracking[self] = True
2531 return True
2532
2533 if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
2534 GlobalData.gBuildHashSkipTracking[self] = True
2535 return True
2536
2537 return False
2538
2539 ## Decide whether we can skip the ModuleAutoGen process
2540 # If any source file is newer than the module than we cannot skip
2541 #
2542 def CanSkip(self):
2543 # Don't skip if cache feature enabled
2544 if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
2545 return False
2546 if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
2547 return True
2548 if not os.path.exists(self.TimeStampPath):
2549 return False
2550 #last creation time of the module
2551 DstTimeStamp = os.stat(self.TimeStampPath)[8]
2552
2553 SrcTimeStamp = self.Workspace._SrcTimeStamp
2554 if SrcTimeStamp > DstTimeStamp:
2555 return False
2556
2557 with open(self.TimeStampPath,'r') as f:
2558 for source in f:
2559 source = source.rstrip('\n')
2560 if not os.path.exists(source):
2561 return False
2562 if source not in ModuleAutoGen.TimeDict :
2563 ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
2564 if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
2565 return False
2566 GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
2567 return True
2568
2569 @cached_property
2570 def TimeStampPath(self):
2571 return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')