1 ## @file
2 # Create makefile for MS nmake and GNU make
3 #
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
6 #
7 from __future__ import absolute_import
8 from AutoGen.AutoGen import AutoGen
9 from Common.LongFilePathSupport import LongFilePath, CopyLongFilePath
10 from Common.BuildToolError import *
11 from Common.DataType import *
12 from Common.Misc import *
13 from Common.StringUtils import NormPath,GetSplitList
14 from collections import defaultdict
15 from Workspace.WorkspaceCommon import OrderedListDict
16 import os.path as path
17 import copy
18 import hashlib
19 from . import InfSectionParser
20 from . import GenC
21 from . import GenMake
22 from . import GenDepex
23 from io import BytesIO
24 from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile
25 from Workspace.MetaFileCommentParser import UsageList
26 from .GenPcdDb import CreatePcdDatabaseCode
27 from Common.caching import cached_class_function
28 from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo
29 import json
30 import tempfile
31
32 ## Mapping Makefile type
33 gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
34 #
35 # Regular expression for finding include directories; the difference between MSFT and INTEL/GCC/RVCT
36 # is that the former uses /I while the latter uses -I to specify include directories
37 #
38 gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
39 gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
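# Example (illustrative flag string): for '/nologo /IC:\MyPkg\Include /I D:\Other\Include',
# gBuildOptIncludePatternMsft.findall() yields ['C:\MyPkg\Include', 'D:\Other\Include'];
# gBuildOptIncludePatternOther does the same for '-I' style flags.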
40
41 ## default file name for AutoGen
42 gAutoGenCodeFileName = "AutoGen.c"
43 gAutoGenHeaderFileName = "AutoGen.h"
44 gAutoGenStringFileName = "%(module_name)sStrDefs.h"
45 gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
46 gAutoGenDepexFileName = "%(module_name)s.depex"
47 gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
48 gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
49 gInfSpecVersion = "0x00010017"
50
51 #
52 # Match name = variable
53 #
54 gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
55 #
56 # The format of the GUID in an efivarstore statement is like the following and must be correct:
57 # guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
58 #
59 gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
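# Example (hypothetical efivarstore statement):
#   efivarstore SETUP_DATA, attribute = EFI_VARIABLE_NON_VOLATILE, name = Setup,
#     guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}};
# gEfiVarStoreNamePattern captures 'Setup'; gEfiVarStoreGuidPattern captures the brace-enclosed GUID text.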
60
61 #
62 # Template string used to generate the As-Built INF
63 #
64 gAsBuiltInfHeaderString = TemplateString("""${header_comments}
65
66 # DO NOT EDIT
67 # FILE auto-generated
68
69 [Defines]
70 INF_VERSION = ${module_inf_version}
71 BASE_NAME = ${module_name}
72 FILE_GUID = ${module_guid}
73 MODULE_TYPE = ${module_module_type}${BEGIN}
74 VERSION_STRING = ${module_version_string}${END}${BEGIN}
75 PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
76 UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
77 PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
78 ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
79 UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
80 CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
81 DESTRUCTOR = ${module_destructor}${END}${BEGIN}
82 SHADOW = ${module_shadow}${END}${BEGIN}
83 PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
84 PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
85 PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
86 PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
87 BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
88 SPEC = ${module_spec}${END}${BEGIN}
89 UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
90 MODULE_UNI_FILE = ${module_uni_file}${END}
91
92 [Packages.${module_arch}]${BEGIN}
93 ${package_item}${END}
94
95 [Binaries.${module_arch}]${BEGIN}
96 ${binary_item}${END}
97
98 [PatchPcd.${module_arch}]${BEGIN}
99 ${patchablepcd_item}
100 ${END}
101
102 [Protocols.${module_arch}]${BEGIN}
103 ${protocol_item}
104 ${END}
105
106 [Ppis.${module_arch}]${BEGIN}
107 ${ppi_item}
108 ${END}
109
110 [Guids.${module_arch}]${BEGIN}
111 ${guid_item}
112 ${END}
113
114 [PcdEx.${module_arch}]${BEGIN}
115 ${pcd_item}
116 ${END}
117
118 [LibraryClasses.${module_arch}]
119 ## @LIB_INSTANCES${BEGIN}
120 # ${libraryclasses_item}${END}
121
122 ${depexsection_item}
123
124 ${userextension_tianocore_item}
125
126 ${tail_comments}
127
128 [BuildOptions.${module_arch}]
129 ## @AsBuilt${BEGIN}
130 ## ${flags_item}${END}
131 """)
132 #
133 # Extend the lists contained in a dictionary with the lists stored in another dictionary.
134 # If CopyToDict is not derived from defaultdict(list), this may raise an exception.
135 #
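# Usage sketch (illustrative names): with CopyToDict = defaultdict(list),
# ExtendCopyDictionaryLists(CopyToDict, {'gSomeGuidCName': ['## CONSUMES']})
# leaves CopyToDict['gSomeGuidCName'] == ['## CONSUMES'].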
136 def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
137 for Key in CopyFromDict:
138 CopyToDict[Key].extend(CopyFromDict[Key])
139
140 # Create a directory specified by a set of path elements and return the full path
141 def _MakeDir(PathList):
142 RetVal = path.join(*PathList)
143 CreateDirectory(RetVal)
144 return RetVal
145
146 #
147 # Convert string to C format array
148 #
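# Illustrative conversions: '{0x1, 0x2}' -> '{1,2}', '"ab"' -> '{97,98,0}',
# 'L"ab"' -> '{97,98,0}'; malformed input (unbalanced braces/quotes or
# non-numeric array elements) returns None.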
149 def _ConvertStringToByteArray(Value):
150 Value = Value.strip()
151 if not Value:
152 return None
153 if Value[0] == '{':
154 if not Value.endswith('}'):
155 return None
156 Value = Value.replace(' ', '').replace('{', '').replace('}', '')
157 ValFields = Value.split(',')
158 try:
159 for Index in range(len(ValFields)):
160 ValFields[Index] = str(int(ValFields[Index], 0))
161 except ValueError:
162 return None
163 Value = '{' + ','.join(ValFields) + '}'
164 return Value
165
166 Unicode = False
167 if Value.startswith('L"'):
168 if not Value.endswith('"'):
169 return None
170 Value = Value[1:]
171 Unicode = True
172 elif not Value.startswith('"') or not Value.endswith('"'):
173 return None
174
175 Value = eval(Value) # translate escape character
176 NewValue = '{'
177 for Index in range(0, len(Value)):
178 if Unicode:
179 NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
180 else:
181 NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
182 Value = NewValue + '0}'
183 return Value
184
185 ## ModuleAutoGen class
186 #
187 # This class encapsulates the AutoGen behaviors for the build tools. In addition to
188 # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
189 # to the [depex] section in module's inf file.
190 #
191 class ModuleAutoGen(AutoGen):
192 # call super().__init__ then call the worker function with different parameter count
193 def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
194 if not hasattr(self, "_Init"):
195 self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
196 self._Init = True
197
198 ## Cache the timestamps of metafiles of every module in a class attribute
199 #
200 TimeDict = {}
201
202 def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
203 # check if this module is employed by active platform
204 if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):
205 EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
206 % (MetaFile, Arch))
207 return None
208 return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
209
210 ## Initialize ModuleAutoGen
211 #
212 # @param Workspace EdkIIWorkspaceBuild object
213 # @param ModuleFile The path of module file
214 # @param Target Build target (DEBUG, RELEASE)
215 # @param Toolchain Name of tool chain
216 # @param Arch The arch the module supports
217 # @param PlatformFile Platform meta-file
218 #
219 def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile,DataPipe):
220 EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
221 GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)
222
223 self.Workspace = Workspace
224 self.WorkspaceDir = ""
225 self.PlatformInfo = None
226 self.DataPipe = DataPipe
227 self.__init_platform_info__()
228 self.MetaFile = ModuleFile
229 self.SourceDir = self.MetaFile.SubDir
230 self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)
231
232 self.ToolChain = Toolchain
233 self.BuildTarget = Target
234 self.Arch = Arch
235 self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
236 self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily
237
238 self.IsCodeFileCreated = False
239 self.IsAsBuiltInfCreated = False
240 self.DepexGenerated = False
241
242 self.BuildDatabase = self.Workspace.BuildDatabase
243 self.BuildRuleOrder = None
244 self.BuildTime = 0
245
246 self._GuidComments = OrderedListDict()
247 self._ProtocolComments = OrderedListDict()
248 self._PpiComments = OrderedListDict()
249 self._BuildTargets = None
250 self._IntroBuildTargetList = None
251 self._FinalBuildTargetList = None
252 self._FileTypes = None
253
254 self.AutoGenDepSet = set()
255 self.ReferenceModules = []
256 self.ConstPcd = {}
257 self.Makefile = None
258 self.FileDependCache = {}
259
260 def __init_platform_info__(self):
261 pinfo = self.DataPipe.Get("P_Info")
262 self.WorkspaceDir = pinfo.get("WorkspaceDir")
263 self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)
264 ## hash() operator of ModuleAutoGen
265 #
266 # The module file path, arch, toolchain and build target are used to compute
267 # the hash value of this object
268 #
269 # @retval int Hash value of the module
270 #
271 @cached_class_function
272 def __hash__(self):
273 return hash((self.MetaFile, self.Arch, self.ToolChain,self.BuildTarget))
274 def __repr__(self):
275 return "%s [%s]" % (self.MetaFile, self.Arch)
276
277 # Get FixedAtBuild Pcds of this Module
278 @cached_property
279 def FixedAtBuildPcds(self):
280 RetVal = []
281 for Pcd in self.ModulePcdList:
282 if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:
283 continue
284 if Pcd not in RetVal:
285 RetVal.append(Pcd)
286 return RetVal
287
288 @cached_property
289 def FixedVoidTypePcds(self):
290 RetVal = {}
291 for Pcd in self.FixedAtBuildPcds:
292 if Pcd.DatumType == TAB_VOID:
293 if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:
294 RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue
295 return RetVal
296
297 @property
298 def UniqueBaseName(self):
299 ModuleNames = self.DataPipe.Get("M_Name")
300 if not ModuleNames:
301 return self.Name
302 return ModuleNames.get((self.Name,self.MetaFile),self.Name)
303
304 # Macros that can be used in build_rule.txt (and in the Makefile)
305 @cached_property
306 def Macros(self):
307 return OrderedDict((
308 ("WORKSPACE" ,self.WorkspaceDir),
309 ("MODULE_NAME" ,self.Name),
310 ("MODULE_NAME_GUID" ,self.UniqueBaseName),
311 ("MODULE_GUID" ,self.Guid),
312 ("MODULE_VERSION" ,self.Version),
313 ("MODULE_TYPE" ,self.ModuleType),
314 ("MODULE_FILE" ,str(self.MetaFile)),
315 ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),
316 ("MODULE_RELATIVE_DIR" ,self.SourceDir),
317 ("MODULE_DIR" ,self.SourceDir),
318 ("BASE_NAME" ,self.Name),
319 ("ARCH" ,self.Arch),
320 ("TOOLCHAIN" ,self.ToolChain),
321 ("TOOLCHAIN_TAG" ,self.ToolChain),
322 ("TOOL_CHAIN_TAG" ,self.ToolChain),
323 ("TARGET" ,self.BuildTarget),
324 ("BUILD_DIR" ,self.PlatformInfo.BuildDir),
325 ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
326 ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
327 ("MODULE_BUILD_DIR" ,self.BuildDir),
328 ("OUTPUT_DIR" ,self.OutputDir),
329 ("DEBUG_DIR" ,self.DebugDir),
330 ("DEST_DIR_OUTPUT" ,self.OutputDir),
331 ("DEST_DIR_DEBUG" ,self.DebugDir),
332 ("PLATFORM_NAME" ,self.PlatformInfo.Name),
333 ("PLATFORM_GUID" ,self.PlatformInfo.Guid),
334 ("PLATFORM_VERSION" ,self.PlatformInfo.Version),
335 ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),
336 ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
337 ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),
338 ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)
339 ))
340
341 ## Return the module build data object
342 @cached_property
343 def Module(self):
344 return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
345
346 ## Return the module name
347 @cached_property
348 def Name(self):
349 return self.Module.BaseName
350
351 ## Return the module DxsFile if it exists
352 @cached_property
353 def DxsFile(self):
354 return self.Module.DxsFile
355
356 ## Return the module meta-file GUID
357 @cached_property
358 def Guid(self):
359 #
360 # To build the same module more than once, the module with FILE_GUID overridden is given
361 # the file name <FILE_GUID>module.inf, while the relative path (self.MetaFile.File) remains the real path
362 # in the DSC. The overriding GUID can be retrieved from the file name
363 #
364 if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
365 #
366 # Length of GUID is 36
367 #
368 return os.path.basename(self.MetaFile.Path)[:36]
369 return self.Module.Guid
370
371 ## Return the module version
372 @cached_property
373 def Version(self):
374 return self.Module.Version
375
376 ## Return the module type
377 @cached_property
378 def ModuleType(self):
379 return self.Module.ModuleType
380
381 ## Return the component type (for Edk.x style of module)
382 @cached_property
383 def ComponentType(self):
384 return self.Module.ComponentType
385
386 ## Return the build type
387 @cached_property
388 def BuildType(self):
389 return self.Module.BuildType
390
391 ## Return the PCD_IS_DRIVER setting
392 @cached_property
393 def PcdIsDriver(self):
394 return self.Module.PcdIsDriver
395
396 ## Return the autogen version, i.e. module meta-file version
397 @cached_property
398 def AutoGenVersion(self):
399 return self.Module.AutoGenVersion
400
401 ## Check if the module is library or not
402 @cached_property
403 def IsLibrary(self):
404 return bool(self.Module.LibraryClass)
405
406 ## Check if the module is binary module or not
407 @cached_property
408 def IsBinaryModule(self):
409 return self.Module.IsBinaryModule
410
411 ## Return the directory to store intermediate files of the module
412 @cached_property
413 def BuildDir(self):
414 return _MakeDir((
415 self.PlatformInfo.BuildDir,
416 self.Arch,
417 self.SourceDir,
418 self.MetaFile.BaseName
419 ))
420
421 ## Return the directory to store the intermediate object files of the module
422 @cached_property
423 def OutputDir(self):
424 return _MakeDir((self.BuildDir, "OUTPUT"))
425
426 ## Return the directory path to store ffs file
427 @cached_property
428 def FfsOutputDir(self):
429 if GlobalData.gFdfParser:
430 return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
431 return ''
432
433 ## Return the directory to store auto-gened source files of the module
434 @cached_property
435 def DebugDir(self):
436 return _MakeDir((self.BuildDir, "DEBUG"))
437
438 ## Return the path of the custom makefile
439 @cached_property
440 def CustomMakefile(self):
441 RetVal = {}
442 for Type in self.Module.CustomMakefile:
443 MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'
444 File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
445 RetVal[MakeType] = File
446 return RetVal
447
448 ## Return the directory of the makefile
449 #
450 # @retval string The directory string of module's makefile
451 #
452 @cached_property
453 def MakeFileDir(self):
454 return self.BuildDir
455
456 ## Return build command string
457 #
458 # @retval string Build command string
459 #
460 @cached_property
461 def BuildCommand(self):
462 return self.PlatformInfo.BuildCommand
463
464 ## Get Module package and Platform package
465 #
466 # @retval list The list of package object
467 #
468 @cached_property
469 def PackageList(self):
470 PkagList = []
471 if self.Module.Packages:
472 PkagList.extend(self.Module.Packages)
473 Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
474 for Package in Platform.Packages:
475 if Package in PkagList:
476 continue
477 PkagList.append(Package)
478 return PkagList
479
480 ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on
481 #
482 # @retval list The list of package object
483 #
484 @cached_property
485 def DerivedPackageList(self):
486 PackageList = []
487 PackageList.extend(self.PackageList)
488 for M in self.DependentLibraryList:
489 for Package in M.Packages:
490 if Package in PackageList:
491 continue
492 PackageList.append(Package)
493 return PackageList
494
495 ## Get the depex string
496 #
497 # @return : a string containing all depex expressions.
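    # Illustrative output for a non-USER_DEFINED module with a single inherited depex
    # (hypothetical protocol GUID name): '[Depex.X64]\n# gEfiHiiDatabaseProtocolGuid'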
498 def _GetDepexExpresionString(self):
499 DepexStr = ''
500 DepexList = []
501 ## DPX_SOURCE in the [Defines] section.
502 if self.Module.DxsFile:
503 return DepexStr
504 for M in [self.Module] + self.DependentLibraryList:
505 Filename = M.MetaFile.Path
506 InfObj = InfSectionParser.InfSectionParser(Filename)
507 DepexExpressionList = InfObj.GetDepexExpresionList()
508 for DepexExpression in DepexExpressionList:
509 for key in DepexExpression:
510 Arch, ModuleType = key
511 DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
512 # If the module type is USER_DEFINED, all of the different DEPEX section tags
513 # are copied into the As-Built INF file as separate DEPEX sections
514 #
515 if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
516 if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
517 DepexList.append({(Arch, ModuleType): DepexExpr})
518 else:
519 if Arch.upper() == TAB_ARCH_COMMON or \
520 (Arch.upper() == self.Arch.upper() and \
521 ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
522 DepexList.append({(Arch, ModuleType): DepexExpr})
523
524 # The module type is USER_DEFINED.
525 if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
526 for Depex in DepexList:
527 for key in Depex:
528 DepexStr += '[Depex.%s.%s]\n' % key
529 DepexStr += '\n'.join('# '+ val for val in Depex[key])
530 DepexStr += '\n\n'
531 if not DepexStr:
532 return '[Depex.%s]\n' % self.Arch
533 return DepexStr
534
535 # The module type is not USER_DEFINED.
536 Count = 0
537 for Depex in DepexList:
538 Count += 1
539 if DepexStr != '':
540 DepexStr += ' AND '
541 DepexStr += '('
542 for D in Depex.values():
543 DepexStr += ' '.join(val for val in D)
544 Index = DepexStr.find('END')
545 if Index > -1 and Index == len(DepexStr) - 3:
546 DepexStr = DepexStr[:-3]
547 DepexStr = DepexStr.strip()
548 DepexStr += ')'
549 if Count == 1:
550 DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
551 if not DepexStr:
552 return '[Depex.%s]\n' % self.Arch
553 return '[Depex.%s]\n# ' % self.Arch + DepexStr
554
555 ## Merge dependency expression
556 #
557 # @retval list The token list of the dependency expression after parsed
558 #
559 @cached_property
560 def DepexList(self):
561 if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
562 return {}
563
564 DepexList = []
565 #
566 # Append depex from dependent libraries, unless it is a "BEFORE" or "AFTER" expression
567 #
568 FixedVoidTypePcds = {}
569 for M in [self] + self.LibraryAutoGenList:
570 FixedVoidTypePcds.update(M.FixedVoidTypePcds)
571 for M in [self] + self.LibraryAutoGenList:
572 Inherited = False
573 for D in M.Module.Depex[self.Arch, self.ModuleType]:
574 if DepexList != []:
575 DepexList.append('AND')
576 DepexList.append('(')
577 #replace D with value if D is FixedAtBuild PCD
578 NewList = []
579 for item in D:
580 if '.' not in item:
581 NewList.append(item)
582 else:
583 try:
584 Value = FixedVoidTypePcds[item]
585 if len(Value.split(',')) != 16:
586 EdkLogger.error("build", FORMAT_INVALID,
587 "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
588 NewList.append(Value)
589 except:
590 EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))
591
592 DepexList.extend(NewList)
593 if DepexList[-1] == 'END': # no need for an END here
594 DepexList.pop()
595 DepexList.append(')')
596 Inherited = True
597 if Inherited:
598 EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
599 if 'BEFORE' in DepexList or 'AFTER' in DepexList:
600 break
601 if len(DepexList) > 0:
602 EdkLogger.verbose('')
603 return {self.ModuleType:DepexList}
604
605 ## Merge dependency expression
606 #
607 # @retval list The token list of the dependency expression after parsing
608 #
609 @cached_property
610 def DepexExpressionDict(self):
611 if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
612 return {}
613
614 DepexExpressionString = ''
615 #
616 # Append depex from dependent libraries, unless it is a "BEFORE" or "AFTER" expression
617 #
618 for M in [self.Module] + self.DependentLibraryList:
619 Inherited = False
620 for D in M.DepexExpression[self.Arch, self.ModuleType]:
621 if DepexExpressionString != '':
622 DepexExpressionString += ' AND '
623 DepexExpressionString += '('
624 DepexExpressionString += D
625 DepexExpressionString = DepexExpressionString.rstrip('END').strip()
626 DepexExpressionString += ')'
627 Inherited = True
628 if Inherited:
629 EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
630 if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
631 break
632 if len(DepexExpressionString) > 0:
633 EdkLogger.verbose('')
634
635 return {self.ModuleType:DepexExpressionString}
636
637 # Get the TianoCore user extensions, including those from dependent libraries.
638 # @retval: a list containing the TianoCore user extensions.
639 #
640 def _GetTianoCoreUserExtensionList(self):
641 TianoCoreUserExtentionList = []
642 for M in [self.Module] + self.DependentLibraryList:
643 Filename = M.MetaFile.Path
644 InfObj = InfSectionParser.InfSectionParser(Filename)
645 TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
646 for TianoCoreUserExtent in TianoCoreUserExtenList:
647 for Section in TianoCoreUserExtent:
648 ItemList = Section.split(TAB_SPLIT)
649 Arch = self.Arch
650 if len(ItemList) == 4:
651 Arch = ItemList[3]
652 if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
653 TianoCoreList = []
654 TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
655 TianoCoreList.extend(TianoCoreUserExtent[Section][:])
656 TianoCoreList.append('\n')
657 TianoCoreUserExtentionList.append(TianoCoreList)
658
659 return TianoCoreUserExtentionList
660
661 ## Return the list of specification version required for the module
662 #
663 # @retval list The list of specification defined in module file
664 #
665 @cached_property
666 def Specification(self):
667 return self.Module.Specification
668
669 ## Tool option for the module build
670 #
671 # @param PlatformInfo The object of PlatformBuildInfo
672 # @retval dict The dict containing valid options
673 #
674 @cached_property
675 def BuildOption(self):
676 RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
677 if self.BuildRuleOrder:
678 self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
679 return RetVal
680
681 ## Get include path list from tool option for the module build
682 #
683 # @retval list The include path list
684 #
685 @cached_property
686 def BuildOptionIncPathList(self):
687 #
688 # Regular expression for finding include directories; the difference between MSFT and INTEL/GCC/RVCT
689 # is that the former uses /I while the latter uses -I to specify include directories
690 #
691 if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT,):
692 BuildOptIncludeRegEx = gBuildOptIncludePatternMsft
693 elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
694 BuildOptIncludeRegEx = gBuildOptIncludePatternOther
695 else:
696 #
697 # New ToolChainFamily; it is not known whether there is an option to specify include directories
698 #
699 return []
700
701 RetVal = []
702 for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
703 try:
704 FlagOption = self.BuildOption[Tool]['FLAGS']
705 except KeyError:
706 FlagOption = ''
707
708 if self.ToolChainFamily != 'RVCT':
709 IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
710 else:
711 #
712 # RVCT may specify a list of directories separated by commas
713 #
714 IncPathList = []
715 for Path in BuildOptIncludeRegEx.findall(FlagOption):
716 PathList = GetSplitList(Path, TAB_COMMA_SPLIT)
717 IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)
718
719 #
720 # EDK II modules may only reference header files inside the packages they depend on or
721 # within the module's own directory tree. Report an error on violation.
722 #
723 if GlobalData.gDisableIncludePathCheck == False:
724 for Path in IncPathList:
725 if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
726 ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
727 EdkLogger.error("build",
728 PARAMETER_INVALID,
729 ExtraData=ErrMsg,
730 File=str(self.MetaFile))
731 RetVal += IncPathList
732 return RetVal
733
734 ## Return a list of files which can be built from source
735 #
736 # What kind of files can be built is determined by build rules in
737 # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
738 #
739 @cached_property
740 def SourceFileList(self):
741 RetVal = []
742 ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
743 ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
744 for F in self.Module.Sources:
745 # match tool chain
746 if F.TagName not in ToolChainTagSet:
747 EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
748 "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
749 continue
750 # match tool chain family or build rule family
751 if F.ToolChainFamily not in ToolChainFamilySet:
752 EdkLogger.debug(
753 EdkLogger.DEBUG_0,
754 "The file [%s] must be built by tools of [%s], " \
755 "but current toolchain family is [%s], buildrule family is [%s]" \
756 % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
757 continue
758
759 # add the file path into search path list for file including
760 if F.Dir not in self.IncludePathList:
761 self.IncludePathList.insert(0, F.Dir)
762 RetVal.append(F)
763
764 self._MatchBuildRuleOrder(RetVal)
765
766 for F in RetVal:
767 self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
768 return RetVal
769
770 def _MatchBuildRuleOrder(self, FileList):
771 Order_Dict = {}
772 self.BuildOption
773 for SingleFile in FileList:
774 if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
775 key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]
776 if key in Order_Dict:
777 Order_Dict[key].append(SingleFile.Ext)
778 else:
779 Order_Dict[key] = [SingleFile.Ext]
780
781 RemoveList = []
782 for F in Order_Dict:
783 if len(Order_Dict[F]) > 1:
784 Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
785 for Ext in Order_Dict[F][1:]:
786 RemoveList.append(F + Ext)
787
788 for item in RemoveList:
789 FileList.remove(item)
790
791 return FileList
792
793 ## Return the list of unicode files
794 @cached_property
795 def UnicodeFileList(self):
796 return self.FileTypes.get(TAB_UNICODE_FILE,[])
797
798 ## Return the list of vfr files
799 @cached_property
800 def VfrFileList(self):
801 return self.FileTypes.get(TAB_VFR_FILE, [])
802
803 ## Return the list of Image Definition files
804 @cached_property
805 def IdfFileList(self):
806 return self.FileTypes.get(TAB_IMAGE_FILE,[])
807
808 ## Return a list of files which can be built from binary
809 #
810 # "Build" binary files are just to copy them to build directory.
811 #
812 # @retval list The list of files which can be built later
813 #
814 @cached_property
815 def BinaryFileList(self):
816 RetVal = []
817 for F in self.Module.Binaries:
818 if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:
819 continue
820 RetVal.append(F)
821 self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
822 return RetVal
823
824 @cached_property
825 def BuildRules(self):
826 RetVal = {}
827 BuildRuleDatabase = self.PlatformInfo.BuildRule
828 for Type in BuildRuleDatabase.FileTypeList:
829 #first try getting build rule by BuildRuleFamily
830 RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
831 if not RuleObject:
832 # build type is always module type, but ...
833 if self.ModuleType != self.BuildType:
834 RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
835 #second try getting build rule by ToolChainFamily
836 if not RuleObject:
837 RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
838 if not RuleObject:
839 # build type is always module type, but ...
840 if self.ModuleType != self.BuildType:
841 RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
842 if not RuleObject:
843 continue
844 RuleObject = RuleObject.Instantiate(self.Macros)
845 RetVal[Type] = RuleObject
846 for Ext in RuleObject.SourceFileExtList:
847 RetVal[Ext] = RuleObject
848 return RetVal
849
850 def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
851 if self._BuildTargets is None:
852 self._IntroBuildTargetList = set()
853 self._FinalBuildTargetList = set()
854 self._BuildTargets = defaultdict(set)
855 self._FileTypes = defaultdict(set)
856
857 if not BinaryFileList:
858 BinaryFileList = self.BinaryFileList
859
860 SubDirectory = os.path.join(self.OutputDir, File.SubDir)
861 if not os.path.exists(SubDirectory):
862 CreateDirectory(SubDirectory)
863 TargetList = set()
864 FinalTargetName = set()
865 RuleChain = set()
866 SourceList = [File]
867 Index = 0
868 #
869 # Make sure to get build rule order value
870 #
871 self.BuildOption
872
873 while Index < len(SourceList):
874 # Reset the FileType if not the first iteration.
875 if Index > 0:
876 FileType = TAB_UNKNOWN_FILE
877 Source = SourceList[Index]
878 Index = Index + 1
879
880 if Source != File:
881 CreateDirectory(Source.Dir)
882
883 if File.IsBinary and File == Source and File in BinaryFileList:
884 # Skip all files that are not binary libraries
885 if not self.IsLibrary:
886 continue
887 RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
888 elif FileType in self.BuildRules:
889 RuleObject = self.BuildRules[FileType]
890 elif Source.Ext in self.BuildRules:
891 RuleObject = self.BuildRules[Source.Ext]
892 else:
893 # No more rule to apply: Source is a final target.
894 FinalTargetName.add(Source)
895 continue
896
897 FileType = RuleObject.SourceFileType
898 self._FileTypes[FileType].add(Source)
899
900 # stop at STATIC_LIBRARY for library
901 if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
902 FinalTargetName.add(Source)
903 continue
904
905 Target = RuleObject.Apply(Source, self.BuildRuleOrder)
906 if not Target:
907 # No Target: Source is a final target.
908 FinalTargetName.add(Source)
909 continue
910
911 TargetList.add(Target)
912 self._BuildTargets[FileType].add(Target)
913
914 if not Source.IsBinary and Source == File:
915 self._IntroBuildTargetList.add(Target)
916
917 # to avoid cyclic rule
918 if FileType in RuleChain:
919 EdkLogger.error("build", ERROR_STATEMENT, "Cyclic dependency detected while generating rule for %s" % str(Source))
920
921 RuleChain.add(FileType)
922 SourceList.extend(Target.Outputs)
923
924 # For each final target name, retrieve the corresponding TargetDescBlock instance.
925 for FTargetName in FinalTargetName:
926 for Target in TargetList:
927 if FTargetName == Target.Target:
928 self._FinalBuildTargetList.add(Target)
929
930 @cached_property
931 def Targets(self):
932 if self._BuildTargets is None:
933 self._IntroBuildTargetList = set()
934 self._FinalBuildTargetList = set()
935 self._BuildTargets = defaultdict(set)
936 self._FileTypes = defaultdict(set)
937
938 #TRICK: call SourceFileList property to apply build rule for source files
939 self.SourceFileList
940
941 #TRICK: call BinaryFileList property to apply build rule for binary files
942 self.BinaryFileList
943
944 return self._BuildTargets
945
946 @cached_property
947 def IntroTargetList(self):
948 self.Targets
949 return self._IntroBuildTargetList
950
951 @cached_property
952 def CodaTargetList(self):
953 self.Targets
954 return self._FinalBuildTargetList
955
956 @cached_property
957 def FileTypes(self):
958 self.Targets
959 return self._FileTypes
960
961 ## Get the list of package object the module depends on and the Platform depends on
962 #
963 # @retval list The package object list
964 #
965 @cached_property
966 def DependentPackageList(self):
967 return self.PackageList
968
969 ## Return the list of auto-generated code file
970 #
971 # @retval list The list of auto-generated file
972 #
973 @cached_property
974 def AutoGenFileList(self):
975 AutoGenUniIdf = self.BuildType != 'UEFI_HII'
976 UniStringBinBuffer = BytesIO()
977 IdfGenBinBuffer = BytesIO()
978 RetVal = {}
979 AutoGenC = TemplateString()
980 AutoGenH = TemplateString()
981 StringH = TemplateString()
982 StringIdf = TemplateString()
983 GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
984 #
985 # AutoGen.c is generated if there are library classes in inf, or there are object files
986 #
987 if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
988 or TAB_OBJECT_FILE in self.FileTypes):
989 AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
990 RetVal[AutoFile] = str(AutoGenC)
991 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
992 if str(AutoGenH) != "":
993 AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
994 RetVal[AutoFile] = str(AutoGenH)
995 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
996 if str(StringH) != "":
997 AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
998 RetVal[AutoFile] = str(StringH)
999 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
1000 if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
1001 AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
1002 RetVal[AutoFile] = UniStringBinBuffer.getvalue()
1003 AutoFile.IsBinary = True
1004 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
1005 if UniStringBinBuffer is not None:
1006 UniStringBinBuffer.close()
1007 if str(StringIdf) != "":
1008 AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
1009 RetVal[AutoFile] = str(StringIdf)
1010 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
1011 if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
1012 AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
1013 RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
1014 AutoFile.IsBinary = True
1015 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
1016 if IdfGenBinBuffer is not None:
1017 IdfGenBinBuffer.close()
1018 return RetVal
1019
1020 ## Return the list of library modules explicitly or implicitly used by this module
1021 @cached_property
1022 def DependentLibraryList(self):
1023 # only merge library classes and PCD for non-library module
1024 if self.IsLibrary:
1025 return []
1026 return self.PlatformInfo.ApplyLibraryInstance(self.Module)
1027
1028 ## Get the list of PCDs from current module
1029 #
1030 # @retval list The list of PCD
1031 #
1032 @cached_property
1033 def ModulePcdList(self):
1034 # apply PCD settings from platform
1035 RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
1036
1037 return RetVal
1038 @cached_property
1039 def _PcdComments(self):
1040 ReVal = OrderedListDict()
1041 ExtendCopyDictionaryLists(ReVal, self.Module.PcdComments)
1042 if not self.IsLibrary:
1043 for Library in self.DependentLibraryList:
1044 ExtendCopyDictionaryLists(ReVal, Library.PcdComments)
1045 return ReVal
1046
1047 ## Get the list of PCDs from dependent libraries
1048 #
1049 # @retval list The list of PCD
1050 #
1051 @cached_property
1052 def LibraryPcdList(self):
1053 if self.IsLibrary:
1054 return []
1055 RetVal = []
1056 Pcds = set()
1057 # get PCDs from dependent libraries
1058 for Library in self.DependentLibraryList:
1059 PcdsInLibrary = OrderedDict()
1060 for Key in Library.Pcds:
1061 # skip duplicated PCDs
1062 if Key in self.Module.Pcds or Key in Pcds:
1063 continue
1064 Pcds.add(Key)
1065 PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
1066 RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))
1067 return RetVal
1068
1069 ## Get the GUID value mapping
1070 #
1071 # @retval dict The mapping between GUID cname and its value
1072 #
1073 @cached_property
1074 def GuidList(self):
1075 RetVal = self.Module.Guids
1076 for Library in self.DependentLibraryList:
1077 RetVal.update(Library.Guids)
1078 ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)
1079 ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)
1080 return RetVal
1081
1082 @cached_property
1083 def GetGuidsUsedByPcd(self):
1084 RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())
1085 for Library in self.DependentLibraryList:
1086 RetVal.update(Library.GetGuidsUsedByPcd())
1087 return RetVal
1088 ## Get the protocol value mapping
1089 #
1090 # @retval dict The mapping between protocol cname and its value
1091 #
1092 @cached_property
1093 def ProtocolList(self):
1094 RetVal = OrderedDict(self.Module.Protocols)
1095 for Library in self.DependentLibraryList:
1096 RetVal.update(Library.Protocols)
1097 ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)
1098 ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)
1099 return RetVal
1100
1101 ## Get the PPI value mapping
1102 #
1103 # @retval dict The mapping between PPI cname and its value
1104 #
1105 @cached_property
1106 def PpiList(self):
1107 RetVal = OrderedDict(self.Module.Ppis)
1108 for Library in self.DependentLibraryList:
1109 RetVal.update(Library.Ppis)
1110 ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)
1111 ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)
1112 return RetVal
1113
1114 ## Get the list of include search path
1115 #
1116 # @retval list The include search path list
1117 #
1118 @cached_property
1119 def IncludePathList(self):
1120 RetVal = []
1121 RetVal.append(self.MetaFile.Dir)
1122 RetVal.append(self.DebugDir)
1123
1124 for Package in self.PackageList:
1125 PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
1126 if PackageDir not in RetVal:
1127 RetVal.append(PackageDir)
1128 IncludesList = Package.Includes
1129 if Package._PrivateIncludes:
1130 if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
1131 IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
1132 for Inc in IncludesList:
1133 if Inc not in RetVal:
1134 RetVal.append(str(Inc))
1135 RetVal.extend(self.IncPathFromBuildOptions)
1136 return RetVal
1137
1138 @cached_property
1139 def IncPathFromBuildOptions(self):
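        # Collect include paths passed directly via FLAGS build options. Both the joined
        # form ('/IC:\Foo', '-IC:\Foo') and the split form ('-I C:\Foo') are recognized;
        # only paths that exist on disk are returned (the example paths are illustrative).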
1140 IncPathList = []
1141 for tool in self.BuildOption:
1142 if 'FLAGS' in self.BuildOption[tool]:
1143 flags = self.BuildOption[tool]['FLAGS']
1144 whitespace = False
1145 for flag in flags.split(" "):
1146 flag = flag.strip()
1147 if flag.startswith(("/I","-I")):
1148 if len(flag)>2:
1149 if os.path.exists(flag[2:]):
1150 IncPathList.append(flag[2:])
1151 else:
1152 whitespace = True
1153 continue
1154 if whitespace and flag:
1155 if os.path.exists(flag):
1156 IncPathList.append(flag)
1157 whitespace = False
1158 return IncPathList
1159
1160 @cached_property
1161 def IncludePathLength(self):
1162 return sum(len(inc)+1 for inc in self.IncludePathList)
1163
1164 ## Get the list of include paths from the packages
1165 #
1166 # @retval list The include path list from the packages
1167 #
1168 @cached_property
1169 def PackageIncludePathList(self):
1170 IncludesList = []
1171 for Package in self.PackageList:
1172 PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
1173 IncludesList = Package.Includes
1174 if Package._PrivateIncludes:
1175 if not self.MetaFile.Path.startswith(PackageDir):
1176 IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
1177 return IncludesList
1178
1179 ## Get HII EX PCDs which may be used by VFR
1180 #
1181 # efivarstore statements used by VFR may relate to HII EX PCDs
1182 # Get the variable name and GUID from efivarstore and HII EX PCD
1183 # List the HII EX PCDs in As Built INF if both name and GUID match.
1184 #
1185 # @retval list HII EX PCDs
1186 #
1187 def _GetPcdsMaybeUsedByVfr(self):
1188 if not self.SourceFileList:
1189 return []
1190
1191 NameGuids = set()
1192 for SrcFile in self.SourceFileList:
1193 if SrcFile.Ext.lower() != '.vfr':
1194 continue
1195 Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
1196 if not os.path.exists(Vfri):
1197 continue
1198 VfriFile = open(Vfri, 'r')
1199 Content = VfriFile.read()
1200 VfriFile.close()
1201 Pos = Content.find('efivarstore')
1202 while Pos != -1:
1203 #
1204 # Make sure 'efivarstore' is the start of an efivarstore statement,
1205 # in case the value of 'name' (name = efivarstore) happens to equal 'efivarstore'
1206 #
1207 Index = Pos - 1
1208 while Index >= 0 and Content[Index] in ' \t\r\n':
1209 Index -= 1
1210 if Index >= 0 and Content[Index] != ';':
1211 Pos = Content.find('efivarstore', Pos + len('efivarstore'))
1212 continue
1213 #
1214 # 'efivarstore' must be followed by name and guid
1215 #
1216 Name = gEfiVarStoreNamePattern.search(Content, Pos)
1217 if not Name:
1218 break
1219 Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
1220 if not Guid:
1221 break
1222 NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')
1223 NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
1224 Pos = Content.find('efivarstore', Name.end())
1225 if not NameGuids:
1226 return []
1227 HiiExPcds = []
1228 for Pcd in self.PlatformInfo.Pcds.values():
1229 if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
1230 continue
1231 for SkuInfo in Pcd.SkuInfoList.values():
1232 Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
1233 if not Value:
1234 continue
1235 Name = _ConvertStringToByteArray(SkuInfo.VariableName)
1236 Guid = GuidStructureStringToGuidString(Value)
1237 if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
1238 HiiExPcds.append(Pcd)
1239 break
1240
1241 return HiiExPcds
1242
1243 def _GenOffsetBin(self):
1244 VfrUniBaseName = {}
1245 for SourceFile in self.Module.Sources:
1246 if SourceFile.Type.upper() == ".VFR" :
1247 #
1248 # search the .map file to find the offset of vfr binary in the PE32+/TE file.
1249 #
1250 VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
1251 elif SourceFile.Type.upper() == ".UNI" :
1252 #
1253 # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
1254 #
1255 VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")
1256
1257 if not VfrUniBaseName:
1258 return None
1259 MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
1260 EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
1261 VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
1262 if not VfrUniOffsetList:
1263 return None
1264
1265 OutputName = '%sOffset.bin' % self.Name
1266 UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)
1267
1268 try:
1269 fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
1270 except:
1271 EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
1272
1273 # Use an instance of BytesIO to cache data
1274 fStringIO = BytesIO()
1275
1276 for Item in VfrUniOffsetList:
1277 if (Item[0].find("Strings") != -1):
1278 #
1279 # UNI offset in image.
1280 # GUID + Offset
1281 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
1282 #
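                    # Here and for VfrGuid below, the byte literal is the GUID above
                    # serialized in packed little-endian (EFI_GUID) byte order.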
1283 UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
1284 fStringIO.write(UniGuid)
1285 UniValue = pack ('Q', int (Item[1], 16))
1286 fStringIO.write (UniValue)
1287 else:
1288 #
1289 # VFR binary offset in image.
1290 # GUID + Offset
1291 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
1292 #
1293 VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
1294 fStringIO.write(VfrGuid)
1295 VfrValue = pack ('Q', int (Item[1], 16))
1296 fStringIO.write (VfrValue)
1297 #
1298 # write data into file.
1299 #
1300 try :
1301 fInputfile.write (fStringIO.getvalue())
1302 except:
1303 EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
1304 "file been locked or using by other applications." %UniVfrOffsetFileName, None)
1305
1306 fStringIO.close ()
1307 fInputfile.close ()
1308 return OutputName
1309
1310 @cached_property
1311 def OutputFile(self):
1312 retVal = set()
1313
1314 for Root, Dirs, Files in os.walk(self.BuildDir):
1315 for File in Files:
1316 # lib file is already added through above CodaTargetList, skip it here
1317 if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):
1318 NewFile = path.join(Root, File)
1319 retVal.add(NewFile)
1320
1321 for Root, Dirs, Files in os.walk(self.FfsOutputDir):
1322 for File in Files:
1323 NewFile = path.join(Root, File)
1324 retVal.add(NewFile)
1325
1326 return retVal
1327
1328 ## Create the As-Built INF file for the module
1329 #
1330 def CreateAsBuiltInf(self):
1331
1332 if self.IsAsBuiltInfCreated:
1333 return
1334
1335 # Skip INF file generation for libraries
1336 if self.IsLibrary:
1337 return
1338
1339 # Skip the following code for modules with no source files
1340 if not self.SourceFileList:
1341 return
1342
1343 # Skip the following code for modules without any binary files
1344 if self.BinaryFileList:
1345 return
1346
1347 ### TODO: How to handle mixed source and binary modules
1348
1349 # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
1350 # Also find all packages that the DynamicEx PCDs depend on
1351 Pcds = []
1352 PatchablePcds = []
1353 Packages = []
1354 PcdCheckList = []
1355 PcdTokenSpaceList = []
1356 for Pcd in self.ModulePcdList + self.LibraryPcdList:
1357 if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
1358 PatchablePcds.append(Pcd)
1359 PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
1360 elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
1361 if Pcd not in Pcds:
1362 Pcds.append(Pcd)
1363 PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
1364 PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
1365 PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
1366 GuidList = OrderedDict(self.GuidList)
1367 for TokenSpace in self.GetGuidsUsedByPcd:
1368 # If a token space is not referred to by a patch PCD or Ex PCD, remove its GUID from the GUID list
1369 # The GUIDs in the [Guids] section should really be the GUIDs in the source INF or those referred to by Ex and patch PCDs
1370 if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
1371 GuidList.pop(TokenSpace)
1372 CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
1373 for Package in self.DerivedPackageList:
1374 if Package in Packages:
1375 continue
1376 BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
1377 Found = False
1378 for Index in range(len(BeChecked)):
1379 for Item in CheckList[Index]:
1380 if Item in BeChecked[Index]:
1381 Packages.append(Package)
1382 Found = True
1383 break
1384 if Found:
1385 break
1386
1387 VfrPcds = self._GetPcdsMaybeUsedByVfr()
1388 for Pkg in self.PlatformInfo.PackageList:
1389 if Pkg in Packages:
1390 continue
1391 for VfrPcd in VfrPcds:
1392 if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
1393 (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
1394 Packages.append(Pkg)
1395 break
1396
1397 ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
1398 DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
1399 Guid = self.Guid
1400 MDefs = self.Module.Defines
1401
1402 AsBuiltInfDict = {
1403 'module_name' : self.Name,
1404 'module_guid' : Guid,
1405 'module_module_type' : ModuleType,
1406 'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
1407 'pcd_is_driver_string' : [],
1408 'module_uefi_specification_version' : [],
1409 'module_pi_specification_version' : [],
1410 'module_entry_point' : self.Module.ModuleEntryPointList,
1411 'module_unload_image' : self.Module.ModuleUnloadImageList,
1412 'module_constructor' : self.Module.ConstructorList,
1413 'module_destructor' : self.Module.DestructorList,
1414 'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
1415 'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
1416 'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
1417 'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
1418 'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
1419 'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
1420 'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
1421 'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
1422 'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
1423 'module_arch' : self.Arch,
1424 'package_item' : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
1425 'binary_item' : [],
1426 'patchablepcd_item' : [],
1427 'pcd_item' : [],
1428 'protocol_item' : [],
1429 'ppi_item' : [],
1430 'guid_item' : [],
1431 'flags_item' : [],
1432 'libraryclasses_item' : []
1433 }
1434
1435 if 'MODULE_UNI_FILE' in MDefs:
1436 UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
1437 if os.path.isfile(UNIFile):
1438 shutil.copy2(UNIFile, self.OutputDir)
1439
1440 if self.AutoGenVersion > int(gInfSpecVersion, 0):
1441 AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
1442 else:
1443 AsBuiltInfDict['module_inf_version'] = gInfSpecVersion
1444
1445 if DriverType:
1446 AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)
1447
1448 if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
1449 AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
1450 if 'PI_SPECIFICATION_VERSION' in self.Specification:
1451 AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])
1452
1453 OutputDir = self.OutputDir.replace('\\', '/').strip('/')
1454 DebugDir = self.DebugDir.replace('\\', '/').strip('/')
1455 for Item in self.CodaTargetList:
1456 File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
1457 if os.path.isabs(File):
1458 File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
1459 if Item.Target.Ext.lower() == '.aml':
1460 AsBuiltInfDict['binary_item'].append('ASL|' + File)
1461 elif Item.Target.Ext.lower() == '.acpi':
1462 AsBuiltInfDict['binary_item'].append('ACPI|' + File)
1463 elif Item.Target.Ext.lower() == '.efi':
1464 AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
1465 else:
1466 AsBuiltInfDict['binary_item'].append('BIN|' + File)
1467 if not self.DepexGenerated:
1468 DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
1469 if os.path.exists(DepexFile):
1470 self.DepexGenerated = True
1471 if self.DepexGenerated:
1472 if self.ModuleType in [SUP_MODULE_PEIM]:
1473 AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
1474 elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
1475 AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
1476 elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
1477 AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')
1478
1479 Bin = self._GenOffsetBin()
1480 if Bin:
1481 AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)
1482
1483 for Root, Dirs, Files in os.walk(OutputDir):
1484 for File in Files:
1485 if File.lower().endswith('.pdb'):
1486 AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
1487 HeaderComments = self.Module.HeaderComments
1488 StartPos = 0
1489 for Index in range(len(HeaderComments)):
1490 if HeaderComments[Index].find('@BinaryHeader') != -1:
1491 HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
1492 StartPos = Index
1493 break
1494 AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
1495 AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)
1496
1497 GenList = [
1498 (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
1499 (self.PpiList, self._PpiComments, 'ppi_item'),
1500 (GuidList, self._GuidComments, 'guid_item')
1501 ]
1502 for Item in GenList:
1503 for CName in Item[0]:
1504 Comments = '\n '.join(Item[1][CName]) if CName in Item[1] else ''
1505 Entry = Comments + '\n ' + CName if Comments else CName
1506 AsBuiltInfDict[Item[2]].append(Entry)
1507 PatchList = parsePcdInfoFromMapFile(
1508 os.path.join(self.OutputDir, self.Name + '.map'),
1509 os.path.join(self.OutputDir, self.Name + '.efi')
1510 )
1511 if PatchList:
1512 for Pcd in PatchablePcds:
1513 TokenCName = Pcd.TokenCName
1514 for PcdItem in GlobalData.MixedPcd:
1515 if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
1516 TokenCName = PcdItem[0]
1517 break
1518 for PatchPcd in PatchList:
1519 if TokenCName == PatchPcd[0]:
1520 break
1521 else:
1522 continue
1523 PcdValue = ''
1524 if Pcd.DatumType == 'BOOLEAN':
1525 BoolValue = Pcd.DefaultValue.upper()
1526 if BoolValue == 'TRUE':
1527 Pcd.DefaultValue = '1'
1528 elif BoolValue == 'FALSE':
1529 Pcd.DefaultValue = '0'
1530
1531 if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
1532 HexFormat = '0x%02x'
1533 if Pcd.DatumType == TAB_UINT16:
1534 HexFormat = '0x%04x'
1535 elif Pcd.DatumType == TAB_UINT32:
1536 HexFormat = '0x%08x'
1537 elif Pcd.DatumType == TAB_UINT64:
1538 HexFormat = '0x%016x'
1539 PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
1540 else:
1541 if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
1542 EdkLogger.error("build", AUTOGEN_ERROR,
1543 "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
1544 )
1545 ArraySize = int(Pcd.MaxDatumSize, 0)
1546 PcdValue = Pcd.DefaultValue
1547 if PcdValue[0] != '{':
1548 Unicode = False
1549 if PcdValue[0] == 'L':
1550 Unicode = True
1551 PcdValue = PcdValue.lstrip('L')
1552 PcdValue = eval(PcdValue)
1553 NewValue = '{'
1554 for Index in range(0, len(PcdValue)):
1555 if Unicode:
1556 CharVal = ord(PcdValue[Index])
1557 NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
1558 + '0x%02x' % (CharVal >> 8) + ', '
1559 else:
1560 NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
1561 Padding = '0x00, '
1562 if Unicode:
1563 Padding = Padding * 2
1564 ArraySize = ArraySize // 2
1565 if ArraySize < (len(PcdValue) + 1):
1566 if Pcd.MaxSizeUserSet:
1567 EdkLogger.error("build", AUTOGEN_ERROR,
1568 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
1569 )
1570 else:
1571 ArraySize = len(PcdValue) + 1
1572 if ArraySize > len(PcdValue) + 1:
1573 NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
1574 PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
1575 elif len(PcdValue.split(',')) <= ArraySize:
1576 PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
1577 PcdValue += '}'
1578 else:
1579 if Pcd.MaxSizeUserSet:
1580 EdkLogger.error("build", AUTOGEN_ERROR,
1581 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
1582 )
1583 else:
1584 ArraySize = len(PcdValue) + 1
1585 PcdItem = '%s.%s|%s|0x%X' % \
1586 (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
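# The resulting as-built INF entry has the form 'TokenSpaceGuid.TokenCName|Value|Offset',
# e.g. (names, value and offset hypothetical): gExampleTokenSpaceGuid.PcdExample|0x0010|0x00000240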
1587 PcdComments = ''
1588 if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
1589 PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
1590 if PcdComments:
1591 PcdItem = PcdComments + '\n ' + PcdItem
1592 AsBuiltInfDict['patchablepcd_item'].append(PcdItem)
1593
1594 for Pcd in Pcds + VfrPcds:
1595 PcdCommentList = []
1596 HiiInfo = ''
1597 TokenCName = Pcd.TokenCName
1598 for PcdItem in GlobalData.MixedPcd:
1599 if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
1600 TokenCName = PcdItem[0]
1601 break
1602 if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
1603 for SkuName in Pcd.SkuInfoList:
1604 SkuInfo = Pcd.SkuInfoList[SkuName]
1605 HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
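# e.g. (variable name, GUID name and offset hypothetical): ## L"Setup"|gSetupVariableGuid|0x0004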
1606 break
1607 if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
1608 PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
1609 if HiiInfo:
1610 UsageIndex = -1
1611 UsageStr = ''
1612 for Index, Comment in enumerate(PcdCommentList):
1613 for Usage in UsageList:
1614 if Comment.find(Usage) != -1:
1615 UsageStr = Usage
1616 UsageIndex = Index
1617 break
1618 if UsageIndex != -1:
1619 PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
1620 else:
1621 PcdCommentList.append('## UNDEFINED ' + HiiInfo)
1622 PcdComments = '\n '.join(PcdCommentList)
1623 PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
1624 if PcdComments:
1625 PcdEntry = PcdComments + '\n ' + PcdEntry
1626 AsBuiltInfDict['pcd_item'].append(PcdEntry)
1627 for Item in self.BuildOption:
1628 if 'FLAGS' in self.BuildOption[Item]:
1629 AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))
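# Each entry follows '<Family>:<Target>_<ToolChain>_<Arch>_<Tool>_FLAGS = <flags>',
# e.g. (toolchain and flags hypothetical): GCC:DEBUG_GCC5_X64_CC_FLAGS = -DDISABLE_NEW_DEPRECATED_INTERFACES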
1630
1631 # Generate the LibraryClasses section in comments.
1632 for Library in self.LibraryAutoGenList:
1633 AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))
1634
1635 # Generate the UserExtensions TianoCore section.
1636 # All TianoCore user extensions are copied.
1637 UserExtStr = ''
1638 for TianoCore in self._GetTianoCoreUserExtensionList():
1639 UserExtStr += '\n'.join(TianoCore)
1640 ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
1641 if os.path.isfile(ExtensionFile):
1642 shutil.copy2(ExtensionFile, self.OutputDir)
1643 AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr
1644
1645 # Generate the depex expression section in comments.
1646 DepexExpression = self._GetDepexExpresionString()
1647 AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''
1648
1649 AsBuiltInf = TemplateString()
1650 AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))
1651
1652 SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)
1653
1654 self.IsAsBuiltInfCreated = True
1655
1656 def CacheCopyFile(self, DestDir, SourceDir, File):
1657 if os.path.isdir(File):
1658 return
1659
1660 sub_dir = os.path.relpath(File, SourceDir)
1661 destination_file = os.path.join(DestDir, sub_dir)
1662 destination_dir = os.path.dirname(destination_file)
1663 CreateDirectory(destination_dir)
1664 try:
1665 CopyFileOnChange(File, destination_dir)
1666 except:
1667 EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))
1668 return
1669
1670 def CopyModuleToCache(self):
1671 # Find the MakeHashStr and PreMakeHashStr from the latest MakeHashFileList
1672 # and PreMakeHashFileList files
1673 MakeHashStr = None
1674 PreMakeHashStr = None
1675 MakeTimeStamp = 0
1676 PreMakeTimeStamp = 0
1677 Files = [f for f in os.listdir(LongFilePath(self.BuildDir)) if path.isfile(LongFilePath(path.join(self.BuildDir, f)))]
1678 for File in Files:
1679 if ".MakeHashFileList." in File:
1680 # find the latest file by time stamp
1681 FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
1682 if FileTimeStamp > MakeTimeStamp:
1683 MakeTimeStamp = FileTimeStamp
1684 MakeHashStr = File.split('.')[-1]
1685 if len(MakeHashStr) != 32:
1686 EdkLogger.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File))
1687 if ".PreMakeHashFileList." in File:
1688 FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
1689 if FileTimeStamp > PreMakeTimeStamp:
1690 PreMakeTimeStamp = FileTimeStamp
1691 PreMakeHashStr = File.split('.')[-1]
1692 if len(PreMakeHashStr) != 32:
1693 EdkLogger.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File))
1694
1695 if not MakeHashStr:
1696 EdkLogger.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
1697 return
1698 if not PreMakeHashStr:
1699 EdkLogger.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
1700 return
1701
1702 # Create Cache destination dirs
1703 FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
1704 FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
1705 CacheFileDir = path.join(FileDir, MakeHashStr)
1706 CacheFfsDir = path.join(FfsDir, MakeHashStr)
1707 CreateDirectory (CacheFileDir)
1708 CreateDirectory (CacheFfsDir)
1709
1710 # Create the ModuleHashPair file so that multiple cached versions can coexist
1711 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
1712 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
1713 if os.path.exists(ModuleHashPair):
1714 with open(ModuleHashPair, 'r') as f:
1715 ModuleHashPairList = json.load(f)
1716 if (PreMakeHashStr, MakeHashStr) not in set(map(tuple, ModuleHashPairList)):
1717 ModuleHashPairList.insert(0, (PreMakeHashStr, MakeHashStr))
1718 with open(ModuleHashPair, 'w') as f:
1719 json.dump(ModuleHashPairList, f, indent=2)
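# Illustrative ModuleHashPair content after json.dump (digests hypothetical):
#   [["<32-char PreMakefileHash>", "<32-char MakeHash>"],
#    ["<older PreMakefileHash>", "<older MakeHash>"]]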
1720
1721 # Copy files to Cache destination dirs
1722 if not self.OutputFile:
1723 Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
1724 self.OutputFile = Ma.Binaries
1725 for File in self.OutputFile:
1726 if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):
1727 self.CacheCopyFile(CacheFfsDir, self.FfsOutputDir, File)
1728 else:
1729 if self.Name + ".autogen.hash." in File or \
1730 self.Name + ".autogen.hashchain." in File or \
1731 self.Name + ".hash." in File or \
1732 self.Name + ".hashchain." in File or \
1733 self.Name + ".PreMakeHashFileList." in File or \
1734 self.Name + ".MakeHashFileList." in File:
1735 self.CacheCopyFile(FileDir, self.BuildDir, File)
1736 else:
1737 self.CacheCopyFile(CacheFileDir, self.BuildDir, File)
1738 ## Create makefile for the module and its dependent libraries
1739 #
1740 # @param CreateLibraryMakeFile Flag indicating whether or not the makefiles of
1741 # dependent libraries will be created
1742 #
1743 @cached_class_function
1744 def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):
1745
1746 # Nest this function inside its only caller.
1747 def CreateTimeStamp():
1748 FileSet = {self.MetaFile.Path}
1749
1750 for SourceFile in self.Module.Sources:
1751 FileSet.add (SourceFile.Path)
1752
1753 for Lib in self.DependentLibraryList:
1754 FileSet.add (Lib.MetaFile.Path)
1755
1756 for f in self.AutoGenDepSet:
1757 FileSet.add (f.Path)
1758
1759 if os.path.exists (self.TimeStampPath):
1760 os.remove (self.TimeStampPath)
1761
1762 SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)
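# The AutoGenTimeStamp file written above is a plain newline-separated list of the
# collected files, e.g. (paths hypothetical):
#   /ws/MdeModulePkg/Universal/HelloWorld/HelloWorld.inf
#   /ws/MdeModulePkg/Universal/HelloWorld/HelloWorld.c
#   /ws/MdePkg/Library/BaseLib/BaseLib.inf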
1763
1764 # Skip makefile generation when this is a binary module
1765 if self.IsBinaryModule:
1766 return
1767
1768 self.GenFfsList = GenFfsList
1769
1770 if not self.IsLibrary and CreateLibraryMakeFile:
1771 for LibraryAutoGen in self.LibraryAutoGenList:
1772 LibraryAutoGen.CreateMakeFile()
1773
1774 # CanSkip uses timestamps to determine build skipping
1775 if self.CanSkip():
1776 return
1777
1778 if len(self.CustomMakefile) == 0:
1779 Makefile = GenMake.ModuleMakefile(self)
1780 else:
1781 Makefile = GenMake.CustomMakefile(self)
1782 if Makefile.Generate():
1783 EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
1784 (self.Name, self.Arch))
1785 else:
1786 EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
1787 (self.Name, self.Arch))
1788
1789 CreateTimeStamp()
1790
1791 MakefileType = Makefile._FileType
1792 MakefileName = Makefile._FILE_NAME_[MakefileType]
1793 MakefilePath = os.path.join(self.MakeFileDir, MakefileName)
1794 FilePath = path.join(self.BuildDir, self.Name + ".makefile")
1795 SaveFileOnChange(FilePath, MakefilePath, False)
1796
1797 def CopyBinaryFiles(self):
1798 for File in self.Module.Binaries:
1799 SrcPath = File.Path
1800 DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))
1801 CopyLongFilePath(SrcPath, DstPath)
1802 ## Create autogen code for the module and its dependent libraries
1803 #
1804 # @param CreateLibraryCodeFile Flag indicating whether or not the code of
1805 # dependent libraries will be created
1806 #
1807 def CreateCodeFile(self, CreateLibraryCodeFile=True):
1808
1809 if self.IsCodeFileCreated:
1810 return
1811
1812 # Need to generate the PcdDatabase even when the PCD driver is a binary module
1813 if self.IsBinaryModule and self.PcdIsDriver != '':
1814 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
1815 return
1816 if self.IsBinaryModule:
1817 if self.IsLibrary:
1818 self.CopyBinaryFiles()
1819 return
1820
1821 if not self.IsLibrary and CreateLibraryCodeFile:
1822 for LibraryAutoGen in self.LibraryAutoGenList:
1823 LibraryAutoGen.CreateCodeFile()
1824
1825 self.LibraryAutoGenList # Evaluate the cached property so the library AutoGen objects are created
1826 AutoGenList = []
1827 IgoredAutoGenList = []
1828
1829 for File in self.AutoGenFileList:
1830 if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
1831 AutoGenList.append(str(File))
1832 else:
1833 IgoredAutoGenList.append(str(File))
1834
1835
1836 for ModuleType in self.DepexList:
1837 # Ignore an empty [depex] section, or the [depex] section of a SUP_MODULE_USER_DEFINED or SUP_MODULE_HOST_APPLICATION module
1838 if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
1839 continue
1840
1841 Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
1842 DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}
1843
1844 if len(Dpx.PostfixNotation) != 0:
1845 self.DepexGenerated = True
1846
1847 if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
1848 AutoGenList.append(str(DpxFile))
1849 else:
1850 IgoredAutoGenList.append(str(DpxFile))
1851
1852 if IgoredAutoGenList == []:
1853 EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
1854 (" ".join(AutoGenList), self.Name, self.Arch))
1855 elif AutoGenList == []:
1856 EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
1857 (" ".join(IgoredAutoGenList), self.Name, self.Arch))
1858 else:
1859 EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
1860 (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))
1861
1862 self.IsCodeFileCreated = True
1863
1864 return AutoGenList
1865
1866 ## Summarize the ModuleAutoGen objects of all libraries used by this module
1867 @cached_property
1868 def LibraryAutoGenList(self):
1869 RetVal = []
1870 for Library in self.DependentLibraryList:
1871 La = ModuleAutoGen(
1872 self.Workspace,
1873 Library.MetaFile,
1874 self.BuildTarget,
1875 self.ToolChain,
1876 self.Arch,
1877 self.PlatformInfo.MetaFile,
1878 self.DataPipe
1879 )
1880 La.IsLibrary = True
1881 if La not in RetVal:
1882 RetVal.append(La)
1883 for Lib in La.CodaTargetList:
1884 self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
1885 return RetVal
1886
1887 def GenCMakeHash(self):
1888 # GenCMakeHash is only called for --binary-destination.
1889 # It is never called in multiprocessing and always saves its result directly in the main process,
1890 # so no remote dict is needed to share the gCMakeHashFile result with the main process.
1891
1892 DependencyFileSet = set()
1893 # Add AutoGen files
1894 if self.AutoGenFileList:
1895 for File in set(self.AutoGenFileList):
1896 DependencyFileSet.add(File)
1897
1898 # Add Makefile
1899 abspath = path.join(self.BuildDir, self.Name + ".makefile")
1900 try:
1901 with open(LongFilePath(abspath),"r") as fd:
1902 lines = fd.readlines()
1903 except Exception as e:
1904 EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
1905 if lines:
1906 DependencyFileSet.update(lines)
1907
1908 # Calculate the hash of all the dependency files above
1909 # Initialize the hash object
1910 FileList = []
1911 m = hashlib.md5()
1912 for File in sorted(DependencyFileSet, key=lambda x: str(x)):
1913 if not path.exists(LongFilePath(str(File))):
1914 EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
1915 continue
1916 with open(LongFilePath(str(File)), 'rb') as f:
1917 Content = f.read()
1918 m.update(Content)
1919 FileList.append((str(File), hashlib.md5(Content).hexdigest()))
1920
1921 HashChainFile = path.join(self.BuildDir, self.Name + ".autogen.hashchain." + m.hexdigest())
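# e.g. <BuildDir>/<ModuleName>.autogen.hashchain.<32-char md5 of all contents>,
# containing a JSON list of [file path, per-file md5] pairs (illustrative).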
1922 GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
1923 try:
1924 with open(LongFilePath(HashChainFile), 'w') as f:
1925 json.dump(FileList, f, indent=2)
1926 except:
1927 EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
1928 return False
1929
1930 def GenModuleHash(self):
1931 # GenModuleHash is only called after the autogen phase.
1932 # It is never called in multiprocessing and always saves its result directly in the main process,
1933 # so no remote dict is needed to share the gModuleHashFile result with the main process.
1934 #
1935 # GenModuleHash consumes no dict.
1936 # GenModuleHash produces the local gModuleHashFile dict.
1937
1938 DependencyFileSet = set()
1939 # Add Module Meta file
1940 DependencyFileSet.add(self.MetaFile.Path)
1941
1942 # Add Module's source files
1943 if self.SourceFileList:
1944 for File in set(self.SourceFileList):
1945 DependencyFileSet.add(File.Path)
1946
1947 # Add the module's included header files
1948 # Directly use the deps.txt file in the module BuildDir
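# Illustrative deps.txt lines (paths hypothetical); only entries ending in '.h' are kept:
#   /ws/MdePkg/Include/Uefi/UefiBaseType.h
#   /ws/MdePkg/Include/Library/BaseLib.h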
1949 abspath = path.join(self.BuildDir, "deps.txt")
1950 rt = None
1951 try:
1952 with open(LongFilePath(abspath),"r") as fd:
1953 lines = fd.readlines()
1954 if lines:
1955 rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])
1956 except Exception as e:
1957 EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
1958
1959 if rt:
1960 DependencyFileSet.update(rt)
1961
1962
1963 # Calculate the hash of all the dependency files above
1964 # Initialize the hash object
1965 FileList = []
1966 m = hashlib.md5()
1967 BuildDirStr = path.abspath(self.BuildDir).lower()
1968 for File in sorted(DependencyFileSet, key=lambda x: str(x)):
1969 # Skip the AutoGen files in BuildDir which have already been
1970 # included in the .autogen.hash. file
1971 if BuildDirStr in path.abspath(File).lower():
1972 continue
1973 if not path.exists(LongFilePath(File)):
1974 EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
1975 continue
1976 with open(LongFilePath(File), 'rb') as f:
1977 Content = f.read()
1978 m.update(Content)
1979 FileList.append((File, hashlib.md5(Content).hexdigest()))
1980
1981 HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())
1982 GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
1983 try:
1984 with open(LongFilePath(HashChainFile), 'w') as f:
1985 json.dump(FileList, f, indent=2)
1986 except:
1987 EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
1988 return False
1989
1990 def GenPreMakefileHashList(self):
1991 # GenPreMakefileHashList consumes the dicts below:
1992 # gPlatformHashFile
1993 # gPackageHashFile
1994 # gModuleHashFile
1995 # GenPreMakefileHashList produces no dict.
1996 # gModuleHashFile items might be produced in multiprocessing, so
1997 # the gModuleHashFile remote dict needs to be checked
1998
1999 # skip binary module
2000 if self.IsBinaryModule:
2001 return
2002
2003 FileList = []
2004 m = hashlib.md5()
2005 # Add Platform level hash
2006 HashFile = GlobalData.gPlatformHashFile
2007 if path.exists(LongFilePath(HashFile)):
2008 FileList.append(HashFile)
2009 m.update(HashFile.encode('utf-8'))
2010 else:
2011 EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)
2012
2013 # Add Package level hash
2014 if self.DependentPackageList:
2015 for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
2016 if not (Pkg.PackageName, Pkg.Arch) in GlobalData.gPackageHashFile:
2017 EdkLogger.quiet("[cache warning]:No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))
2018 continue
2019 HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]
2020 if path.exists(LongFilePath(HashFile)):
2021 FileList.append(HashFile)
2022 m.update(HashFile.encode('utf-8'))
2023 else:
2024 EdkLogger.quiet("[cache warning]:No Package HashFile: %s" % HashFile)
2025
2026 # Add Module self
2027 # GenPreMakefileHashList is needed for both --binary-destination
2028 # and --hash, and --hash might save the ModuleHashFile in the remote dict
2029 # during multiprocessing.
2030 if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
2031 HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
2032 else:
2033 EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2034 if path.exists(LongFilePath(HashFile)):
2035 FileList.append(HashFile)
2036 m.update(HashFile.encode('utf-8'))
2037 else:
2038 EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)
2039
2040 # Add Library hash
2041 if self.LibraryAutoGenList:
2042 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
2043
2044 if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
2045 HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
2046 else:
2047 EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
2048 if path.exists(LongFilePath(HashFile)):
2049 FileList.append(HashFile)
2050 m.update(HashFile.encode('utf-8'))
2051 else:
2052 EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)
2053
2054 # Save PreMakeHashFileList
2055 FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())
2056 try:
2057 with open(LongFilePath(FilePath), 'w') as f:
2058 json.dump(FileList, f, indent=0)
2059 except:
2060 EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)
2061
2062 def GenMakefileHashList(self):
2063 # GenMakefileHashList is only needed for --binary-destination, which keeps
2064 # everything in the local dict, so there is no need to check the remote dict.
2065
2066 # skip binary module
2067 if self.IsBinaryModule:
2068 return
2069
2070 FileList = []
2071 m = hashlib.md5()
2072 # Add AutoGen hash
2073 HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]
2074 if path.exists(LongFilePath(HashFile)):
2075 FileList.append(HashFile)
2076 m.update(HashFile.encode('utf-8'))
2077 else:
2078 EdkLogger.quiet("[cache warning]:No AutoGen HashFile: %s" % HashFile)
2079
2080 # Add Module self
2081 if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
2082 HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
2083 else:
2084 EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2085 if path.exists(LongFilePath(HashFile)):
2086 FileList.append(HashFile)
2087 m.update(HashFile.encode('utf-8'))
2088 else:
2089 EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)
2090
2091 # Add Library hash
2092 if self.LibraryAutoGenList:
2093 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
2094 if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
2095 HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
2096 else:
2097 EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
2098 if path.exists(LongFilePath(HashFile)):
2099 FileList.append(HashFile)
2100 m.update(HashFile.encode('utf-8'))
2101 else:
2102 EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)
2103
2104 # Save MakeHashFileList
2105 FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())
2106 try:
2107 with open(LongFilePath(FilePath), 'w') as f:
2108 json.dump(FileList, f, indent=0)
2109 except:
2110 EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)
2111
2112 def CheckHashChainFile(self, HashChainFile):
2113 # Assume the HashChainFile basename format is 'x.hashchain.16BytesHexStr',
2114 # where x is the module name and 16BytesHexStr is the md5 hexdigest of
2115 # the content of all the files recorded in the chain
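# e.g. (digest hypothetical): PcdDxe.hashchain.0123456789abcdef0123456789abcdef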
2116 HashStr = HashChainFile.split('.')[-1]
2117 if len(HashStr) != 32:
2118 EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % (File))
2119 return False
2120
2121 try:
2122 with open(LongFilePath(HashChainFile), 'r') as f:
2123 HashChainList = json.load(f)
2124 except:
2125 EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)
2126 return False
2127
2128 # Print the different file info
2129 # print(HashChainFile)
2130 for idx, (SrcFile, SrcHash) in enumerate (HashChainList):
2131 if SrcFile in GlobalData.gFileHashDict:
2132 DestHash = GlobalData.gFileHashDict[SrcFile]
2133 else:
2134 try:
2135 with open(LongFilePath(SrcFile), 'rb') as f:
2136 Content = f.read()
2137 DestHash = hashlib.md5(Content).hexdigest()
2138 GlobalData.gFileHashDict[SrcFile] = DestHash
2139 except IOError as X:
2140 # cache miss if SrcFile is removed in new version code
2141 GlobalData.gFileHashDict[SrcFile] = 0
2142 EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
2143 return False
2144 if SrcHash != DestHash:
2145 EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
2146 return False
2147
2148 return True
2149
2150 ## Decide whether we can skip the left autogen and make process
2151 def CanSkipbyMakeCache(self):
2152 # For --binary-source only
2153 # CanSkipbyMakeCache consumes the dicts below:
2154 # gModuleMakeCacheStatus
2155 # gHashChainStatus
2156 # CanSkipbyMakeCache produces the gModuleMakeCacheStatus and gModuleHashFile dicts.
2157 # All these dicts might be produced in multiprocessing, so
2158 # the remote dicts need to be checked
2159
2160 if not GlobalData.gBinCacheSource:
2161 return False
2162
2163 if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:
2164 return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]
2165
2166 # If the module is binary, which has a special build rule, do not skip it via the cache.
2167 if self.IsBinaryModule:
2168 print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
2169 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2170 return False
2171
2172 # Treat .inc files as binary; do not skip by hash
2173 for f_ext in self.SourceFileList:
2174 if '.inc' in str(f_ext):
2175 print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
2176 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2177 return False
2178
2179 ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2180 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
2181
2182 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2183 ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
2184 try:
2185 with open(LongFilePath(ModuleHashPair), 'r') as f:
2186 ModuleHashPairList = json.load(f)
2187 except:
2188 # ModuleHashPair might not exist for a newly added module
2189 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2190 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
2191 print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
2192 return False
2193
2194 # Check the MakeHash in ModuleHashPairList one by one
2195 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2196 SourceHashDir = path.join(ModuleCacheDir, MakeHash)
2197 SourceFfsHashDir = path.join(FfsDir, MakeHash)
2198 PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
2199 MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)
2200
2201 try:
2202 with open(LongFilePath(MakeHashFileList_FilePah), 'r') as f:
2203 MakeHashFileList = json.load(f)
2204 except:
2205 EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePah)
2206 continue
2207
2208 HashMiss = False
2209 for HashChainFile in MakeHashFileList:
2210 HashChainStatus = None
2211 if HashChainFile in GlobalData.gHashChainStatus:
2212 HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
2213 if HashChainStatus == False:
2214 HashMiss = True
2215 break
2216 elif HashChainStatus == True:
2217 continue
2218 # Convert to a path starting with the cache source dir
2219 RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
2220 NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
2221 if self.CheckHashChainFile(NewFilePath):
2222 GlobalData.gHashChainStatus[HashChainFile] = True
2223 # Save the module self HashFile for GenPreMakefileHashList later usage
2224 if self.Name + ".hashchain." in HashChainFile:
2225 GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
2226 else:
2227 GlobalData.gHashChainStatus[HashChainFile] = False
2228 HashMiss = True
2229 break
2230
2231 if HashMiss:
2232 continue
2233
2234 # Makefile cache hit, restore the module build result
2235 for root, dir, files in os.walk(SourceHashDir):
2236 for f in files:
2237 File = path.join(root, f)
2238 self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
2239 if os.path.exists(SourceFfsHashDir):
2240 for root, dir, files in os.walk(SourceFfsHashDir):
2241 for f in files:
2242 File = path.join(root, f)
2243 self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)
2244
2245 if self.Name == "PcdPeim" or self.Name == "PcdDxe":
2246 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
2247
2248 print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)
2249 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
2250 return True
2251
2252 print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
2253 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2254 return False
2255
2256 ## Decide whether we can skip the left autogen and make process
2257 def CanSkipbyPreMakeCache(self):
2258 # CanSkipbyPreMakeCache consumes the dicts below:
2259 # gModulePreMakeCacheStatus
2260 # gHashChainStatus
2261 # gModuleHashFile
2262 # CanSkipbyPreMakeCache produces the gModulePreMakeCacheStatus dict.
2263 # All these dicts might be produced in multiprocessing, so
2264 # the remote dicts need to be checked
2265
2266 if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:
2267 return False
2268
2269 if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:
2270 return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]
2271
2272 # If the module is binary, which has a special build rule, do not skip it via the cache.
2273 if self.IsBinaryModule:
2274 print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
2275 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2276 return False
2277
2278 # Treat .inc files as binary; do not skip by hash
2279 for f_ext in self.SourceFileList:
2280 if '.inc' in str(f_ext):
2281 print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
2282 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2283 return False
2284
2285 # For --hash only in the incremental build
2286 if not GlobalData.gBinCacheSource:
2287 Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]
2288 PreMakeHashFileList_FilePah = None
2289 MakeTimeStamp = 0
2290 # Find latest PreMakeHashFileList file in self.BuildDir folder
2291 for File in Files:
2292 if ".PreMakeHashFileList." in File:
2293 FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]
2294 if FileTimeStamp > MakeTimeStamp:
2295 MakeTimeStamp = FileTimeStamp
2296 PreMakeHashFileList_FilePah = File
2297 if not PreMakeHashFileList_FilePah:
2298 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2299 return False
2300
2301 try:
2302 with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:
2303 PreMakeHashFileList = json.load(f)
2304 except:
2305 EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
2306 print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
2307 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2308 return False
2309
2310 HashMiss = False
2311 for HashChainFile in PreMakeHashFileList:
2312 HashChainStatus = None
2313 if HashChainFile in GlobalData.gHashChainStatus:
2314 HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
2315 if HashChainStatus == False:
2316 HashMiss = True
2317 break
2318 elif HashChainStatus == True:
2319 continue
2320 if self.CheckHashChainFile(HashChainFile):
2321 GlobalData.gHashChainStatus[HashChainFile] = True
2322 # Save the module self HashFile for GenPreMakefileHashList later usage
2323 if self.Name + ".hashchain." in HashChainFile:
2324 GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
2325 else:
2326 GlobalData.gHashChainStatus[HashChainFile] = False
2327 HashMiss = True
2328 break
2329
2330 if HashMiss:
2331 print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
2332 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2333 return False
2334 else:
2335 print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
2336 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
2337 return True
2338
2339 ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2340 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
2341
2342 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2343 ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
2344 try:
2345 with open(LongFilePath(ModuleHashPair), 'r') as f:
2346 ModuleHashPairList = json.load(f)
2347 except:
2348 # ModuleHashPair might not exist for a newly added module
2349 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2350 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
2351 print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
2352 return False
2353
2354 # Check the PreMakeHash in ModuleHashPairList one by one
2355 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2356 SourceHashDir = path.join(ModuleCacheDir, MakeHash)
2357 SourceFfsHashDir = path.join(FfsDir, MakeHash)
2358 PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
2359 MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)
2360
2361 try:
2362 with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:
2363 PreMakeHashFileList = json.load(f)
2364 except:
2365 EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
2366 continue
2367
2368 HashMiss = False
2369 for HashChainFile in PreMakeHashFileList:
2370 HashChainStatus = None
2371 if HashChainFile in GlobalData.gHashChainStatus:
2372 HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
2373 if HashChainStatus == False:
2374 HashMiss = True
2375 break
2376 elif HashChainStatus == True:
2377 continue
2378 # Convert to a path starting with the cache source dir
2379 RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
2380 NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
2381 if self.CheckHashChainFile(NewFilePath):
2382 GlobalData.gHashChainStatus[HashChainFile] = True
2383 else:
2384 GlobalData.gHashChainStatus[HashChainFile] = False
2385 HashMiss = True
2386 break
2387
2388 if HashMiss:
2389 continue
2390
2391 # PreMakefile cache hit, restore the module build result
2392 for root, dir, files in os.walk(SourceHashDir):
2393 for f in files:
2394 File = path.join(root, f)
2395 self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
2396 if os.path.exists(SourceFfsHashDir):
2397 for root, dir, files in os.walk(SourceFfsHashDir):
2398 for f in files:
2399 File = path.join(root, f)
2400 self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)
2401
2402 if self.Name == "PcdPeim" or self.Name == "PcdDxe":
2403 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
2404
2405 print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
2406 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
2407 return True
2408
2409 print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
2410 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2411 return False
2412
2413 ## Decide whether we can skip the Module build
2414 def CanSkipbyCache(self, gHitSet):
2415 # Hashing feature is off
2416 if not GlobalData.gBinCacheSource:
2417 return False
2418
2419 if self in gHitSet:
2420 return True
2421
2422 return False
2423
2424 ## Decide whether we can skip the ModuleAutoGen process
2425 # If any source file is newer than the module, then we cannot skip
2426 #
2427 def CanSkip(self):
2428 # Don't skip if the cache feature is enabled
2429 if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
2430 return False
2431 if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
2432 return True
2433 if not os.path.exists(self.TimeStampPath):
2434 return False
2435 # last creation time of the module
2436 DstTimeStamp = os.stat(self.TimeStampPath)[8]
2437
2438 SrcTimeStamp = self.Workspace._SrcTimeStamp
2439 if SrcTimeStamp > DstTimeStamp:
2440 return False
2441
2442 with open(self.TimeStampPath,'r') as f:
2443 for source in f:
2444 source = source.rstrip('\n')
2445 if not os.path.exists(source):
2446 return False
2447 if source not in ModuleAutoGen.TimeDict :
2448 ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
2449 if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
2450 return False
2451 GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
2452 return True
2453
2454 @cached_property
2455 def TimeStampPath(self):
2456 return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')