1 ## @file
2 # Create makefile for MS nmake and GNU make
3 #
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
6 #
7 from __future__ import absolute_import
8 from AutoGen.AutoGen import AutoGen
9 from Common.LongFilePathSupport import LongFilePath, CopyLongFilePath
10 from Common.BuildToolError import *
11 from Common.DataType import *
12 from Common.Misc import *
13 from Common.StringUtils import NormPath,GetSplitList
14 from collections import defaultdict
15 from Workspace.WorkspaceCommon import OrderedListDict
16 import os.path as path
17 import copy
18 import hashlib
19 from . import InfSectionParser
20 from . import GenC
21 from . import GenMake
22 from . import GenDepex
23 from io import BytesIO
24 from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile
25 from Workspace.MetaFileCommentParser import UsageList
26 from .GenPcdDb import CreatePcdDatabaseCode
27 from Common.caching import cached_class_function
28 from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo
29 import json
30 import tempfile
31
32 ## Mapping from toolchain family to Makefile type
33 gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
34 #
35 # Regular expression for finding include directories; the difference between MSFT and INTEL/GCC
36 # is that the former uses /I while the latter uses -I to specify include directories
37 #
38 gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
39 gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
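# Example (illustrative): for an MSFT flag string '/nologo /I C:\Include' the first pattern
# captures 'C:\Include'; for a GCC flag string '-I /usr/include' the second pattern
# captures '/usr/include'.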
40
41 ## default file names for AutoGen
42 gAutoGenCodeFileName = "AutoGen.c"
43 gAutoGenHeaderFileName = "AutoGen.h"
44 gAutoGenStringFileName = "%(module_name)sStrDefs.h"
45 gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
46 gAutoGenDepexFileName = "%(module_name)s.depex"
47 gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
48 gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
49 gInfSpecVersion = "0x00010017"
50
51 #
52 # Match name = variable
53 #
54 gEfiVarStoreNamePattern = re.compile("\s*name\s*=\s*(\w+)")
55 #
56 # The format of the guid in an efivarstore statement is like the following and must be correct:
57 # guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
58 #
59 gEfiVarStoreGuidPattern = re.compile("\s*guid\s*=\s*({.*?{.*?}\s*})")
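# Example (illustrative): the statement 'name = MyNvData' is matched by
# gEfiVarStoreNamePattern with group(1) == 'MyNvData'.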
60
61 #
62 # Template string to generate the As-Built INF
63 #
64 gAsBuiltInfHeaderString = TemplateString("""${header_comments}
65
66 # DO NOT EDIT
67 # FILE auto-generated
68
69 [Defines]
70 INF_VERSION = ${module_inf_version}
71 BASE_NAME = ${module_name}
72 FILE_GUID = ${module_guid}
73 MODULE_TYPE = ${module_module_type}${BEGIN}
74 VERSION_STRING = ${module_version_string}${END}${BEGIN}
75 PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
76 UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
77 PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
78 ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
79 UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
80 CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
81 DESTRUCTOR = ${module_destructor}${END}${BEGIN}
82 SHADOW = ${module_shadow}${END}${BEGIN}
83 PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
84 PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
85 PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
86 PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
87 BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
88 SPEC = ${module_spec}${END}${BEGIN}
89 UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
90 MODULE_UNI_FILE = ${module_uni_file}${END}
91
92 [Packages.${module_arch}]${BEGIN}
93 ${package_item}${END}
94
95 [Binaries.${module_arch}]${BEGIN}
96 ${binary_item}${END}
97
98 [PatchPcd.${module_arch}]${BEGIN}
99 ${patchablepcd_item}
100 ${END}
101
102 [Protocols.${module_arch}]${BEGIN}
103 ${protocol_item}
104 ${END}
105
106 [Ppis.${module_arch}]${BEGIN}
107 ${ppi_item}
108 ${END}
109
110 [Guids.${module_arch}]${BEGIN}
111 ${guid_item}
112 ${END}
113
114 [PcdEx.${module_arch}]${BEGIN}
115 ${pcd_item}
116 ${END}
117
118 [LibraryClasses.${module_arch}]
119 ## @LIB_INSTANCES${BEGIN}
120 # ${libraryclasses_item}${END}
121
122 ${depexsection_item}
123
124 ${userextension_tianocore_item}
125
126 ${tail_comments}
127
128 [BuildOptions.${module_arch}]
129 ## @AsBuilt${BEGIN}
130 ## ${flags_item}${END}
131 """)
132 #
133 # Extend the lists contained in a dictionary with the lists stored in another dictionary.
134 # If CopyToDict is not derived from defaultdict(list), this may raise an exception.
135 #
136 def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
137 for Key in CopyFromDict:
138 CopyToDict[Key].extend(CopyFromDict[Key])
139
140 # Create a directory specified by a set of path elements and return the full path
141 def _MakeDir(PathList):
142 RetVal = path.join(*PathList)
143 CreateDirectory(RetVal)
144 return RetVal
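# Example (illustrative): _MakeDir((self.BuildDir, 'OUTPUT')) creates <BuildDir>/OUTPUT if
# needed and returns the joined path; this is how OutputDir and DebugDir below are built.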
145
146 #
147 # Convert a string to a C-format byte array
148 #
149 def _ConvertStringToByteArray(Value):
150 Value = Value.strip()
151 if not Value:
152 return None
153 if Value[0] == '{':
154 if not Value.endswith('}'):
155 return None
156 Value = Value.replace(' ', '').replace('{', '').replace('}', '')
157 ValFields = Value.split(',')
158 try:
159 for Index in range(len(ValFields)):
160 ValFields[Index] = str(int(ValFields[Index], 0))
161 except ValueError:
162 return None
163 Value = '{' + ','.join(ValFields) + '}'
164 return Value
165
166 Unicode = False
167 if Value.startswith('L"'):
168 if not Value.endswith('"'):
169 return None
170 Value = Value[1:]
171 Unicode = True
172 elif not Value.startswith('"') or not Value.endswith('"'):
173 return None
174
175 Value = eval(Value) # translate escape character
176 NewValue = '{'
177 for Index in range(0, len(Value)):
178 if Unicode:
179 NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
180 else:
181 NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
182 Value = NewValue + '0}'
183 return Value
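# Examples (illustrative): _ConvertStringToByteArray('"Hi"') returns '{72,105,0}';
# for L"..." input the characters are emitted as 16-bit code points; malformed input
# (unbalanced quotes/braces or non-numeric bytes) returns None.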
184
185 ## ModuleAutoGen class
186 #
187 # This class encapsulates the AutoGen behaviors for the build tools. In addition to
188 # the generation of AutoGen.h and AutoGen.c, it will generate a *.depex file according
189 # to the [Depex] section in the module's INF file.
190 #
191 class ModuleAutoGen(AutoGen):
192 # call super().__init__ then call the worker function with different parameter count
193 def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
194 if not hasattr(self, "_Init"):
195 self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
196 self._Init = True
197
198 ## Cache the timestamps of metafiles of every module in a class attribute
199 #
200 TimeDict = {}
201
202 def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
203 # check if this module is employed by the active platform
204 if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):
205 EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
206 % (MetaFile, Arch))
207 return None
208 return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
209
210 ## Initialize ModuleAutoGen
211 #
212 # @param Workspace EdkIIWorkspaceBuild object
213 # @param ModuleFile The path of module file
214 # @param Target Build target (DEBUG, RELEASE)
215 # @param Toolchain Name of tool chain
216 # @param Arch The arch the module supports
217 # @param PlatformFile Platform meta-file
218 #
219 def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile,DataPipe):
220 EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
221 GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)
222
223 self.Workspace = Workspace
224 self.WorkspaceDir = ""
225 self.PlatformInfo = None
226 self.DataPipe = DataPipe
227 self.__init_platform_info__()
228 self.MetaFile = ModuleFile
229 self.SourceDir = self.MetaFile.SubDir
230 self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)
231
232 self.ToolChain = Toolchain
233 self.BuildTarget = Target
234 self.Arch = Arch
235 self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
236 self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily
237
238 self.IsCodeFileCreated = False
239 self.IsAsBuiltInfCreated = False
240 self.DepexGenerated = False
241
242 self.BuildDatabase = self.Workspace.BuildDatabase
243 self.BuildRuleOrder = None
244 self.BuildTime = 0
245
246 self._GuidComments = OrderedListDict()
247 self._ProtocolComments = OrderedListDict()
248 self._PpiComments = OrderedListDict()
249 self._BuildTargets = None
250 self._IntroBuildTargetList = None
251 self._FinalBuildTargetList = None
252 self._FileTypes = None
253
254 self.AutoGenDepSet = set()
255 self.ReferenceModules = []
256 self.ConstPcd = {}
257 self.FileDependCache = {}
258
259 def __init_platform_info__(self):
260 pinfo = self.DataPipe.Get("P_Info")
261 self.WorkspaceDir = pinfo.get("WorkspaceDir")
262 self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)
263 ## hash() operator of ModuleAutoGen
264 #
265 # The module file path, arch, toolchain and build target are used to compute the
266 # hash value of this object
267 #
268 # @retval int Hash value of the module file path, arch, toolchain and build target
269 #
270 @cached_class_function
271 def __hash__(self):
272 return hash((self.MetaFile, self.Arch, self.ToolChain,self.BuildTarget))
273 def __repr__(self):
274 return "%s [%s]" % (self.MetaFile, self.Arch)
275
276 # Get FixedAtBuild Pcds of this Module
277 @cached_property
278 def FixedAtBuildPcds(self):
279 RetVal = []
280 for Pcd in self.ModulePcdList:
281 if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:
282 continue
283 if Pcd not in RetVal:
284 RetVal.append(Pcd)
285 return RetVal
286
287 @cached_property
288 def FixedVoidTypePcds(self):
289 RetVal = {}
290 for Pcd in self.FixedAtBuildPcds:
291 if Pcd.DatumType == TAB_VOID:
292 if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:
293 RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue
294 return RetVal
295
296 @property
297 def UniqueBaseName(self):
298 ModuleNames = self.DataPipe.Get("M_Name")
299 if not ModuleNames:
300 return self.Name
301 return ModuleNames.get((self.Name,self.MetaFile),self.Name)
302
303 # Macros that can be used in build_rule.txt (and in the Makefile)
304 @cached_property
305 def Macros(self):
306 return OrderedDict((
307 ("WORKSPACE" ,self.WorkspaceDir),
308 ("MODULE_NAME" ,self.Name),
309 ("MODULE_NAME_GUID" ,self.UniqueBaseName),
310 ("MODULE_GUID" ,self.Guid),
311 ("MODULE_VERSION" ,self.Version),
312 ("MODULE_TYPE" ,self.ModuleType),
313 ("MODULE_FILE" ,str(self.MetaFile)),
314 ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),
315 ("MODULE_RELATIVE_DIR" ,self.SourceDir),
316 ("MODULE_DIR" ,self.SourceDir),
317 ("BASE_NAME" ,self.Name),
318 ("ARCH" ,self.Arch),
319 ("TOOLCHAIN" ,self.ToolChain),
320 ("TOOLCHAIN_TAG" ,self.ToolChain),
321 ("TOOL_CHAIN_TAG" ,self.ToolChain),
322 ("TARGET" ,self.BuildTarget),
323 ("BUILD_DIR" ,self.PlatformInfo.BuildDir),
324 ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
325 ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
326 ("MODULE_BUILD_DIR" ,self.BuildDir),
327 ("OUTPUT_DIR" ,self.OutputDir),
328 ("DEBUG_DIR" ,self.DebugDir),
329 ("DEST_DIR_OUTPUT" ,self.OutputDir),
330 ("DEST_DIR_DEBUG" ,self.DebugDir),
331 ("PLATFORM_NAME" ,self.PlatformInfo.Name),
332 ("PLATFORM_GUID" ,self.PlatformInfo.Guid),
333 ("PLATFORM_VERSION" ,self.PlatformInfo.Version),
334 ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),
335 ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
336 ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),
337 ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)
338 ))
339
340 ## Return the module build data object
341 @cached_property
342 def Module(self):
343 return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
344
345 ## Return the module name
346 @cached_property
347 def Name(self):
348 return self.Module.BaseName
349
350 ## Return the module DxsFile if it exists
351 @cached_property
352 def DxsFile(self):
353 return self.Module.DxsFile
354
355 ## Return the module meta-file GUID
356 @cached_property
357 def Guid(self):
358 #
359 # To build the same module more than once, the module path with FILE_GUID overridden has
360 # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
361 # in the DSC. The overridden GUID can be retrieved from the file name
362 #
363 if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
364 #
365 # Length of GUID is 36
366 #
367 return os.path.basename(self.MetaFile.Path)[:36]
368 return self.Module.Guid
369
370 ## Return the module version
371 @cached_property
372 def Version(self):
373 return self.Module.Version
374
375 ## Return the module type
376 @cached_property
377 def ModuleType(self):
378 return self.Module.ModuleType
379
380 ## Return the component type (for Edk.x style of module)
381 @cached_property
382 def ComponentType(self):
383 return self.Module.ComponentType
384
385 ## Return the build type
386 @cached_property
387 def BuildType(self):
388 return self.Module.BuildType
389
390 ## Return the PCD_IS_DRIVER setting
391 @cached_property
392 def PcdIsDriver(self):
393 return self.Module.PcdIsDriver
394
395 ## Return the autogen version, i.e. module meta-file version
396 @cached_property
397 def AutoGenVersion(self):
398 return self.Module.AutoGenVersion
399
400 ## Check if the module is a library or not
401 @cached_property
402 def IsLibrary(self):
403 return bool(self.Module.LibraryClass)
404
405 ## Check if the module is a binary module or not
406 @cached_property
407 def IsBinaryModule(self):
408 return self.Module.IsBinaryModule
409
410 ## Return the directory to store intermediate files of the module
411 @cached_property
412 def BuildDir(self):
413 return _MakeDir((
414 self.PlatformInfo.BuildDir,
415 self.Arch,
416 self.SourceDir,
417 self.MetaFile.BaseName
418 ))
419
420 ## Return the directory to store the intermediate object files of the module
421 @cached_property
422 def OutputDir(self):
423 return _MakeDir((self.BuildDir, "OUTPUT"))
424
425 ## Return the directory path to store ffs file
426 @cached_property
427 def FfsOutputDir(self):
428 if GlobalData.gFdfParser:
429 return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
430 return ''
431
432 ## Return the directory to store auto-generated source files of the module
433 @cached_property
434 def DebugDir(self):
435 return _MakeDir((self.BuildDir, "DEBUG"))
436
437 ## Return the paths of the module's custom makefiles
438 @cached_property
439 def CustomMakefile(self):
440 RetVal = {}
441 for Type in self.Module.CustomMakefile:
442 MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'
443 File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
444 RetVal[MakeType] = File
445 return RetVal
446
447 ## Return the directory of the makefile
448 #
449 # @retval string The directory string of module's makefile
450 #
451 @cached_property
452 def MakeFileDir(self):
453 return self.BuildDir
454
455 ## Return build command string
456 #
457 # @retval string Build command string
458 #
459 @cached_property
460 def BuildCommand(self):
461 return self.PlatformInfo.BuildCommand
462
463 ## Get the module's packages and the platform's packages
464 #
465 # @retval list The list of package object
466 #
467 @cached_property
468 def PackageList(self):
469 PkagList = []
470 if self.Module.Packages:
471 PkagList.extend(self.Module.Packages)
472 Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
473 for Package in Platform.Packages:
474 if Package in PkagList:
475 continue
476 PkagList.append(Package)
477 return PkagList
478
479 ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on
480 #
481 # @retval list The list of package object
482 #
483 @cached_property
484 def DerivedPackageList(self):
485 PackageList = []
486 PackageList.extend(self.PackageList)
487 for M in self.DependentLibraryList:
488 for Package in M.Packages:
489 if Package in PackageList:
490 continue
491 PackageList.append(Package)
492 return PackageList
493
494 ## Get the depex string
495 #
496 # @return : a string containing all depex expressions.
497 def _GetDepexExpresionString(self):
498 DepexStr = ''
499 DepexList = []
500 ## DPX_SOURCE IN Define section.
501 if self.Module.DxsFile:
502 return DepexStr
503 for M in [self.Module] + self.DependentLibraryList:
504 Filename = M.MetaFile.Path
505 InfObj = InfSectionParser.InfSectionParser(Filename)
506 DepexExpressionList = InfObj.GetDepexExpresionList()
507 for DepexExpression in DepexExpressionList:
508 for key in DepexExpression:
509 Arch, ModuleType = key
510 DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
511 # If the type of the build module is USER_DEFINED,
512 # all the different DEPEX section tags would be copied into the As-Built INF file
513 # and there would be separate DEPEX section tags
514 if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
515 if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
516 DepexList.append({(Arch, ModuleType): DepexExpr})
517 else:
518 if Arch.upper() == TAB_ARCH_COMMON or \
519 (Arch.upper() == self.Arch.upper() and \
520 ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
521 DepexList.append({(Arch, ModuleType): DepexExpr})
522
523 # The type of the build module is USER_DEFINED.
524 if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
525 for Depex in DepexList:
526 for key in Depex:
527 DepexStr += '[Depex.%s.%s]\n' % key
528 DepexStr += '\n'.join('# '+ val for val in Depex[key])
529 DepexStr += '\n\n'
530 if not DepexStr:
531 return '[Depex.%s]\n' % self.Arch
532 return DepexStr
533
534 # The type of the build module is not USER_DEFINED.
535 Count = 0
536 for Depex in DepexList:
537 Count += 1
538 if DepexStr != '':
539 DepexStr += ' AND '
540 DepexStr += '('
541 for D in Depex.values():
542 DepexStr += ' '.join(val for val in D)
543 Index = DepexStr.find('END')
544 if Index > -1 and Index == len(DepexStr) - 3:
545 DepexStr = DepexStr[:-3]
546 DepexStr = DepexStr.strip()
547 DepexStr += ')'
548 if Count == 1:
549 DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
550 if not DepexStr:
551 return '[Depex.%s]\n' % self.Arch
552 return '[Depex.%s]\n# ' % self.Arch + DepexStr
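# Example (illustrative): for a non-USER_DEFINED X64 module whose merged depex is a single
# GUID, the returned string looks like '[Depex.X64]\n# gEfiVariableArchProtocolGuid'.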
553
554 ## Merge dependency expression
555 #
556 # @retval dict The token list of the dependency expression after parsing, keyed by module type
557 #
558 @cached_property
559 def DepexList(self):
560 if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
561 return {}
562
563 DepexList = []
564 #
565 # Append depex from dependent libraries, unless it is a "BEFORE" or "AFTER" expression
566 #
567 FixedVoidTypePcds = {}
568 for M in [self] + self.LibraryAutoGenList:
569 FixedVoidTypePcds.update(M.FixedVoidTypePcds)
570 for M in [self] + self.LibraryAutoGenList:
571 Inherited = False
572 for D in M.Module.Depex[self.Arch, self.ModuleType]:
573 if DepexList != []:
574 DepexList.append('AND')
575 DepexList.append('(')
576 #replace D with value if D is FixedAtBuild PCD
577 NewList = []
578 for item in D:
579 if '.' not in item:
580 NewList.append(item)
581 else:
582 try:
583 Value = FixedVoidTypePcds[item]
584 if len(Value.split(',')) != 16:
585 EdkLogger.error("build", FORMAT_INVALID,
586 "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
587 NewList.append(Value)
588 except:
589 EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))
590
591 DepexList.extend(NewList)
592 if DepexList[-1] == 'END': # no need of a END at this time
593 DepexList.pop()
594 DepexList.append(')')
595 Inherited = True
596 if Inherited:
597 EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
598 if 'BEFORE' in DepexList or 'AFTER' in DepexList:
599 break
600 if len(DepexList) > 0:
601 EdkLogger.verbose('')
602 return {self.ModuleType:DepexList}
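# Example (illustrative): a DXE_DRIVER whose only inherited depex is
# gEfiVariableArchProtocolGuid returns {'DXE_DRIVER': ['(', 'gEfiVariableArchProtocolGuid', ')']}.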
603
604 ## Merge dependency expression
605 #
606 # @retval dict The dependency expression string after parsing, keyed by module type
607 #
608 @cached_property
609 def DepexExpressionDict(self):
610 if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
611 return {}
612
613 DepexExpressionString = ''
614 #
615 # Append depex from dependent libraries, unless it is a "BEFORE" or "AFTER" expression
616 #
617 for M in [self.Module] + self.DependentLibraryList:
618 Inherited = False
619 for D in M.DepexExpression[self.Arch, self.ModuleType]:
620 if DepexExpressionString != '':
621 DepexExpressionString += ' AND '
622 DepexExpressionString += '('
623 DepexExpressionString += D
624 DepexExpressionString = DepexExpressionString.rstrip('END').strip()
625 DepexExpressionString += ')'
626 Inherited = True
627 if Inherited:
628 EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
629 if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
630 break
631 if len(DepexExpressionString) > 0:
632 EdkLogger.verbose('')
633
634 return {self.ModuleType:DepexExpressionString}
635
636 # Get the TianoCore user extensions, including those from dependent libraries.
637 # @retval: a list containing TianoCore user extensions.
638 #
639 def _GetTianoCoreUserExtensionList(self):
640 TianoCoreUserExtentionList = []
641 for M in [self.Module] + self.DependentLibraryList:
642 Filename = M.MetaFile.Path
643 InfObj = InfSectionParser.InfSectionParser(Filename)
644 TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
645 for TianoCoreUserExtent in TianoCoreUserExtenList:
646 for Section in TianoCoreUserExtent:
647 ItemList = Section.split(TAB_SPLIT)
648 Arch = self.Arch
649 if len(ItemList) == 4:
650 Arch = ItemList[3]
651 if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
652 TianoCoreList = []
653 TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
654 TianoCoreList.extend(TianoCoreUserExtent[Section][:])
655 TianoCoreList.append('\n')
656 TianoCoreUserExtentionList.append(TianoCoreList)
657
658 return TianoCoreUserExtentionList
659
660 ## Return the list of specification versions required for the module
661 #
662 # @retval list The list of specifications defined in the module file
663 #
664 @cached_property
665 def Specification(self):
666 return self.Module.Specification
667
668 ## Tool option for the module build
669 #
670 # @param PlatformInfo The object of PlatformBuildInfo
671 # @retval dict The dict containing valid options
672 #
673 @cached_property
674 def BuildOption(self):
675 RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
676 if self.BuildRuleOrder:
677 self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
678 return RetVal
679
680 ## Get include path list from tool option for the module build
681 #
682 # @retval list The include path list
683 #
684 @cached_property
685 def BuildOptionIncPathList(self):
686 #
687 # Regular expression for finding include directories; the difference between MSFT and INTEL/GCC
688 # is that the former uses /I while the latter uses -I to specify include directories
689 #
690 if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):
691 BuildOptIncludeRegEx = gBuildOptIncludePatternMsft
692 elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC'):
693 BuildOptIncludeRegEx = gBuildOptIncludePatternOther
694 else:
695 #
696 # New ToolChainFamily; it is not known whether there is an option to specify include directories
697 #
698 return []
699
700 RetVal = []
701 for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
702 try:
703 FlagOption = self.BuildOption[Tool]['FLAGS']
704 except KeyError:
705 FlagOption = ''
706
707 IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
708
709 #
710 # EDK II modules must only reference header files inside the packages they depend on or
711 # within the module's own directory tree. Report an error on violation.
712 #
713 if GlobalData.gDisableIncludePathCheck == False:
714 for Path in IncPathList:
715 if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
716 ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
717 EdkLogger.error("build",
718 PARAMETER_INVALID,
719 ExtraData=ErrMsg,
720 File=str(self.MetaFile))
721 RetVal += IncPathList
722 return RetVal
723
724 ## Return a list of files which can be built from source
725 #
726 # What kind of files can be built is determined by build rules in
727 # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
728 #
729 @cached_property
730 def SourceFileList(self):
731 RetVal = []
732 ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
733 ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
734 for F in self.Module.Sources:
735 # match tool chain
736 if F.TagName not in ToolChainTagSet:
737 EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
738 "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
739 continue
740 # match tool chain family or build rule family
741 if F.ToolChainFamily not in ToolChainFamilySet:
742 EdkLogger.debug(
743 EdkLogger.DEBUG_0,
744 "The file [%s] must be built by tools of [%s], " \
745 "but current toolchain family is [%s], buildrule family is [%s]" \
746 % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
747 continue
748
749 # add the file path into search path list for file including
750 if F.Dir not in self.IncludePathList:
751 self.IncludePathList.insert(0, F.Dir)
752 RetVal.append(F)
753
754 self._MatchBuildRuleOrder(RetVal)
755
756 for F in RetVal:
757 self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
758 return RetVal
759
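## When several source files share the same base name, keep only the one whose
# extension appears earliest in BuildRuleOrder and drop the rest from FileList
# (illustrative example: with BuildRuleOrder ['.nasm', '.asm'], Foo.asm is removed
# when Foo.nasm is also listed).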
760 def _MatchBuildRuleOrder(self, FileList):
761 Order_Dict = {}
762 self.BuildOption
763 for SingleFile in FileList:
764 if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
765 key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]
766 if key in Order_Dict:
767 Order_Dict[key].append(SingleFile.Ext)
768 else:
769 Order_Dict[key] = [SingleFile.Ext]
770
771 RemoveList = []
772 for F in Order_Dict:
773 if len(Order_Dict[F]) > 1:
774 Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
775 for Ext in Order_Dict[F][1:]:
776 RemoveList.append(F + Ext)
777
778 for item in RemoveList:
779 FileList.remove(item)
780
781 return FileList
782
783 ## Return the list of unicode files
784 @cached_property
785 def UnicodeFileList(self):
786 return self.FileTypes.get(TAB_UNICODE_FILE,[])
787
788 ## Return the list of vfr files
789 @cached_property
790 def VfrFileList(self):
791 return self.FileTypes.get(TAB_VFR_FILE, [])
792
793 ## Return the list of Image Definition files
794 @cached_property
795 def IdfFileList(self):
796 return self.FileTypes.get(TAB_IMAGE_FILE,[])
797
798 ## Return a list of files which can be built from binary
799 #
800 # "Build" binary files are just to copy them to build directory.
801 #
802 # @retval list The list of files which can be built later
803 #
804 @cached_property
805 def BinaryFileList(self):
806 RetVal = []
807 for F in self.Module.Binaries:
808 if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:
809 continue
810 RetVal.append(F)
811 self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
812 return RetVal
813
814 @cached_property
815 def BuildRules(self):
816 RetVal = {}
817 BuildRuleDatabase = self.PlatformInfo.BuildRule
818 for Type in BuildRuleDatabase.FileTypeList:
819 #first try getting build rule by BuildRuleFamily
820 RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
821 if not RuleObject:
822 # build type is always module type, but ...
823 if self.ModuleType != self.BuildType:
824 RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
825 #second try getting build rule by ToolChainFamily
826 if not RuleObject:
827 RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
828 if not RuleObject:
829 # build type is always module type, but ...
830 if self.ModuleType != self.BuildType:
831 RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
832 if not RuleObject:
833 continue
834 RuleObject = RuleObject.Instantiate(self.Macros)
835 RetVal[Type] = RuleObject
836 for Ext in RuleObject.SourceFileExtList:
837 RetVal[Ext] = RuleObject
838 return RetVal
839
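## Apply build rules to File and, transitively, to the outputs each rule produces,
# recording intermediate and final build targets and reporting an error when a
# cyclic rule chain is detected.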
840 def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
841 if self._BuildTargets is None:
842 self._IntroBuildTargetList = set()
843 self._FinalBuildTargetList = set()
844 self._BuildTargets = defaultdict(set)
845 self._FileTypes = defaultdict(set)
846
847 if not BinaryFileList:
848 BinaryFileList = self.BinaryFileList
849
850 SubDirectory = os.path.join(self.OutputDir, File.SubDir)
851 if not os.path.exists(SubDirectory):
852 CreateDirectory(SubDirectory)
853 TargetList = set()
854 FinalTargetName = set()
855 RuleChain = set()
856 SourceList = [File]
857 Index = 0
858 #
859 # Make sure to get build rule order value
860 #
861 self.BuildOption
862
863 while Index < len(SourceList):
864 # Reset the FileType if not the first iteration.
865 if Index > 0:
866 FileType = TAB_UNKNOWN_FILE
867 Source = SourceList[Index]
868 Index = Index + 1
869
870 if Source != File:
871 CreateDirectory(Source.Dir)
872
873 if File.IsBinary and File == Source and File in BinaryFileList:
874 # Skip all files that are not binary libraries
875 if not self.IsLibrary:
876 continue
877 RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
878 elif FileType in self.BuildRules:
879 RuleObject = self.BuildRules[FileType]
880 elif Source.Ext in self.BuildRules:
881 RuleObject = self.BuildRules[Source.Ext]
882 else:
883 # No more rule to apply: Source is a final target.
884 FinalTargetName.add(Source)
885 continue
886
887 FileType = RuleObject.SourceFileType
888 self._FileTypes[FileType].add(Source)
889
890 # stop at STATIC_LIBRARY for library
891 if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
892 FinalTargetName.add(Source)
893 continue
894
895 Target = RuleObject.Apply(Source, self.BuildRuleOrder)
896 if not Target:
897 # No Target: Source is a final target.
898 FinalTargetName.add(Source)
899 continue
900
901 TargetList.add(Target)
902 self._BuildTargets[FileType].add(Target)
903
904 if not Source.IsBinary and Source == File:
905 self._IntroBuildTargetList.add(Target)
906
907 # to avoid cyclic rule
908 if FileType in RuleChain:
909 EdkLogger.error("build", ERROR_STATEMENT, "Cyclic dependency detected while generating rule for %s" % str(Source))
910
911 RuleChain.add(FileType)
912 SourceList.extend(Target.Outputs)
913
914 # For each final target name, retrieve the corresponding TargetDescBlock instance.
915 for FTargetName in FinalTargetName:
916 for Target in TargetList:
917 if FTargetName == Target.Target:
918 self._FinalBuildTargetList.add(Target)
919
920 @cached_property
921 def Targets(self):
922 if self._BuildTargets is None:
923 self._IntroBuildTargetList = set()
924 self._FinalBuildTargetList = set()
925 self._BuildTargets = defaultdict(set)
926 self._FileTypes = defaultdict(set)
927
928 #TRICK: call SourceFileList property to apply build rule for source files
929 self.SourceFileList
930
931 #TRICK: call the BinaryFileList property to apply build rules for binary files
932 self.BinaryFileList
933
934 return self._BuildTargets
935
936 @cached_property
937 def IntroTargetList(self):
938 self.Targets
939 return self._IntroBuildTargetList
940
941 @cached_property
942 def CodaTargetList(self):
943 self.Targets
944 return self._FinalBuildTargetList
945
946 @cached_property
947 def FileTypes(self):
948 self.Targets
949 return self._FileTypes
950
951 ## Get the list of package object the module depends on and the Platform depends on
952 #
953 # @retval list The package object list
954 #
955 @cached_property
956 def DependentPackageList(self):
957 return self.PackageList
958
959 ## Return the list of auto-generated code files
960 #
961 # @retval list The list of auto-generated files
962 #
963 @cached_property
964 def AutoGenFileList(self):
965 AutoGenUniIdf = self.BuildType != 'UEFI_HII'
966 UniStringBinBuffer = BytesIO()
967 IdfGenBinBuffer = BytesIO()
968 RetVal = {}
969 AutoGenC = TemplateString()
970 AutoGenH = TemplateString()
971 StringH = TemplateString()
972 StringIdf = TemplateString()
973 GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
974 #
975 # AutoGen.c is generated if there are library classes in the INF, or if there are object files
976 #
977 if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
978 or TAB_OBJECT_FILE in self.FileTypes):
979 AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
980 RetVal[AutoFile] = str(AutoGenC)
981 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
982 if str(AutoGenH) != "":
983 AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
984 RetVal[AutoFile] = str(AutoGenH)
985 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
986 if str(StringH) != "":
987 AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
988 RetVal[AutoFile] = str(StringH)
989 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
990 if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
991 AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
992 RetVal[AutoFile] = UniStringBinBuffer.getvalue()
993 AutoFile.IsBinary = True
994 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
995 if UniStringBinBuffer is not None:
996 UniStringBinBuffer.close()
997 if str(StringIdf) != "":
998 AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
999 RetVal[AutoFile] = str(StringIdf)
1000 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
1001 if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
1002 AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
1003 RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
1004 AutoFile.IsBinary = True
1005 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
1006 if IdfGenBinBuffer is not None:
1007 IdfGenBinBuffer.close()
1008 return RetVal
1009
1010 ## Return the list of library modules explicitly or implicitly used by this module
1011 @cached_property
1012 def DependentLibraryList(self):
1013 # only merge library classes and PCD for non-library module
1014 if self.IsLibrary:
1015 return []
1016 return self.PlatformInfo.ApplyLibraryInstance(self.Module)
1017
1018 ## Get the list of PCDs from current module
1019 #
1020 # @retval list The list of PCD
1021 #
1022 @cached_property
1023 def ModulePcdList(self):
1024 # apply PCD settings from platform
1025 RetVal = self.PlatformInfo.ApplyPcdSetting(self, self.Module.Pcds)
1026
1027 return RetVal
1028 @cached_property
1029 def _PcdComments(self):
1030 ReVal = OrderedListDict()
1031 ExtendCopyDictionaryLists(ReVal, self.Module.PcdComments)
1032 if not self.IsLibrary:
1033 for Library in self.DependentLibraryList:
1034 ExtendCopyDictionaryLists(ReVal, Library.PcdComments)
1035 return ReVal
1036
1037 ## Get the list of PCDs from dependent libraries
1038 #
1039 # @retval list The list of PCD
1040 #
1041 @cached_property
1042 def LibraryPcdList(self):
1043 if self.IsLibrary:
1044 return []
1045 RetVal = []
1046 Pcds = set()
1047 # get PCDs from dependent libraries
1048 for Library in self.DependentLibraryList:
1049 PcdsInLibrary = OrderedDict()
1050 for Key in Library.Pcds:
1051 # skip duplicated PCDs
1052 if Key in self.Module.Pcds or Key in Pcds:
1053 continue
1054 Pcds.add(Key)
1055 PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
1056 RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self, PcdsInLibrary, Library=Library))
1057 return RetVal
1058
1059 ## Get the GUID value mapping
1060 #
1061 # @retval dict The mapping between GUID cname and its value
1062 #
1063 @cached_property
1064 def GuidList(self):
1065 RetVal = self.Module.Guids
1066 for Library in self.DependentLibraryList:
1067 RetVal.update(Library.Guids)
1068 ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)
1069 ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)
1070 return RetVal
1071
1072 @cached_property
1073 def GetGuidsUsedByPcd(self):
1074 RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())
1075 for Library in self.DependentLibraryList:
1076 RetVal.update(Library.GetGuidsUsedByPcd())
1077 return RetVal
1078 ## Get the protocol value mapping
1079 #
1080 # @retval dict The mapping between protocol cname and its value
1081 #
1082 @cached_property
1083 def ProtocolList(self):
1084 RetVal = OrderedDict(self.Module.Protocols)
1085 for Library in self.DependentLibraryList:
1086 RetVal.update(Library.Protocols)
1087 ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)
1088 ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)
1089 return RetVal
1090
1091 ## Get the PPI value mapping
1092 #
1093 # @retval dict The mapping between PPI cname and its value
1094 #
1095 @cached_property
1096 def PpiList(self):
1097 RetVal = OrderedDict(self.Module.Ppis)
1098 for Library in self.DependentLibraryList:
1099 RetVal.update(Library.Ppis)
1100 ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)
1101 ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)
1102 return RetVal
1103
1104 ## Get the list of include search paths
1105 #
1106 # @retval list The list of paths
1107 #
1108 @cached_property
1109 def IncludePathList(self):
1110 RetVal = []
1111 RetVal.append(self.MetaFile.Dir)
1112 RetVal.append(self.DebugDir)
1113
1114 for Package in self.PackageList:
1115 PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
1116 if PackageDir not in RetVal:
1117 RetVal.append(PackageDir)
1118 IncludesList = Package.Includes
1119 if Package._PrivateIncludes:
1120 if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
1121 IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
1122 for Inc in IncludesList:
1123 if Inc not in RetVal:
1124 RetVal.append(str(Inc))
1125 RetVal.extend(self.IncPathFromBuildOptions)
1126 return RetVal
1127
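## Collect extra include paths passed directly in tool FLAGS via /I or -I
# (including the form where the path follows the switch as a separate argument),
# keeping only paths that exist on disk.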
1128 @cached_property
1129 def IncPathFromBuildOptions(self):
1130 IncPathList = []
1131 for tool in self.BuildOption:
1132 if 'FLAGS' in self.BuildOption[tool]:
1133 flags = self.BuildOption[tool]['FLAGS']
1134 whitespace = False
1135 for flag in flags.split(" "):
1136 flag = flag.strip()
1137 if flag.startswith(("/I","-I")):
1138 if len(flag)>2:
1139 if os.path.exists(flag[2:]):
1140 IncPathList.append(flag[2:])
1141 else:
1142 whitespace = True
1143 continue
1144 if whitespace and flag:
1145 if os.path.exists(flag):
1146 IncPathList.append(flag)
1147 whitespace = False
1148 return IncPathList
1149
1150 @cached_property
1151 def IncludePathLength(self):
1152 return sum(len(inc)+1 for inc in self.IncludePathList)
1153
1154 ## Get the list of include paths from the packages
1155 #
1156 # @retval list The list of include paths
1157 #
1158 @cached_property
1159 def PackageIncludePathList(self):
1160 IncludesList = []
1161 for Package in self.PackageList:
1162 PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
1163 IncludesList = Package.Includes
1164 if Package._PrivateIncludes:
1165 if not self.MetaFile.Path.startswith(PackageDir):
1166 IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
1167 return IncludesList
1168
1169 ## Get HII EX PCDs which may be used by VFR
1170 #
1171 # An efivarstore used by VFR may relate to HII EX PCDs
1172 # Get the variable name and GUID from the efivarstore and the HII EX PCD
1173 # List the HII EX PCDs in the As-Built INF if both name and GUID match.
1174 #
1175 # @retval list HII EX PCDs
1176 #
1177 def _GetPcdsMaybeUsedByVfr(self):
1178 if not self.SourceFileList:
1179 return []
1180
1181 NameGuids = set()
1182 for SrcFile in self.SourceFileList:
1183 if SrcFile.Ext.lower() != '.vfr':
1184 continue
1185 Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
1186 if not os.path.exists(Vfri):
1187 continue
1188 VfriFile = open(Vfri, 'r')
1189 Content = VfriFile.read()
1190 VfriFile.close()
1191 Pos = Content.find('efivarstore')
1192 while Pos != -1:
1193 #
1194 # Make sure 'efivarstore' is the start of an efivarstore statement,
1195 # in case the value of 'name' (name = efivarstore) is equal to 'efivarstore'
1196 #
1197 Index = Pos - 1
1198 while Index >= 0 and Content[Index] in ' \t\r\n':
1199 Index -= 1
1200 if Index >= 0 and Content[Index] != ';':
1201 Pos = Content.find('efivarstore', Pos + len('efivarstore'))
1202 continue
1203 #
1204 # 'efivarstore' must be followed by name and guid
1205 #
1206 Name = gEfiVarStoreNamePattern.search(Content, Pos)
1207 if not Name:
1208 break
1209 Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
1210 if not Guid:
1211 break
1212 NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')
1213 NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
1214 Pos = Content.find('efivarstore', Name.end())
1215 if not NameGuids:
1216 return []
1217 HiiExPcds = []
1218 for Pcd in self.PlatformInfo.Pcds.values():
1219 if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
1220 continue
1221 for SkuInfo in Pcd.SkuInfoList.values():
1222 Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
1223 if not Value:
1224 continue
1225 Name = _ConvertStringToByteArray(SkuInfo.VariableName)
1226 Guid = GuidStructureStringToGuidString(Value)
1227 if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
1228 HiiExPcds.append(Pcd)
1229 break
1230
1231 return HiiExPcds
1232
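## Generate <ModuleName>Offset.bin recording where the VFR and UNI binaries sit
# inside the built .efi image; each record is a 16-byte GUID followed by a
# UINT64 offset taken from the .map file.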
1233 def _GenOffsetBin(self):
1234 VfrUniBaseName = {}
1235 for SourceFile in self.Module.Sources:
1236 if SourceFile.Type.upper() == ".VFR" :
1237 #
1238 # search the .map file to find the offset of vfr binary in the PE32+/TE file.
1239 #
1240 VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
1241 elif SourceFile.Type.upper() == ".UNI" :
1242 #
1243 # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
1244 #
1245 VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")
1246
1247 if not VfrUniBaseName:
1248 return None
1249 MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
1250 EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
1251 VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
1252 if not VfrUniOffsetList:
1253 return None
1254
1255 OutputName = '%sOffset.bin' % self.Name
1256 UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)
1257
1258 try:
1259 fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
1260 except:
1261 EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
1262
1263 # Use an instance of BytesIO to cache data
1264 fStringIO = BytesIO()
1265
1266 for Item in VfrUniOffsetList:
1267 if (Item[0].find("Strings") != -1):
1268 #
1269 # UNI offset in image.
1270 # GUID + Offset
1271 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
1272 #
1273 UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
1274 fStringIO.write(UniGuid)
1275 UniValue = pack ('Q', int (Item[1], 16))
1276 fStringIO.write (UniValue)
1277 else:
1278 #
1279 # VFR binary offset in image.
1280 # GUID + Offset
1281 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
1282 #
1283 VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
1284 fStringIO.write(VfrGuid)
1285 VfrValue = pack ('Q', int (Item[1], 16))
1286 fStringIO.write (VfrValue)
1287 #
1288 # write data into file.
1289 #
1290 try :
1291 fInputfile.write (fStringIO.getvalue())
1292 except:
1293 EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
1294 "file been locked or using by other applications." %UniVfrOffsetFileName, None)
1295
1296 fStringIO.close ()
1297 fInputfile.close ()
1298 return OutputName
1299
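## Collect all build outputs: every file under BuildDir except .obj and .debug
# files, plus everything under FfsOutputDir.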
1300 @cached_property
1301 def OutputFile(self):
1302 retVal = set()
1303
1304 for Root, Dirs, Files in os.walk(self.BuildDir):
1305 for File in Files:
1306 # lib file is already added through above CodaTargetList, skip it here
1307 if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):
1308 NewFile = path.join(Root, File)
1309 retVal.add(NewFile)
1310
1311 for Root, Dirs, Files in os.walk(self.FfsOutputDir):
1312 for File in Files:
1313 NewFile = path.join(Root, File)
1314 retVal.add(NewFile)
1315
1316 return retVal
1317
1318 ## Create the As-Built INF file for the module
1319 #
1320 def CreateAsBuiltInf(self):
1321
1322 if self.IsAsBuiltInfCreated:
1323 return
1324
1325 # Skip INF file generation for libraries
1326 if self.IsLibrary:
1327 return
1328
1329 # Skip the following code for modules with no source files
1330 if not self.SourceFileList:
1331 return
1332
1333 # Skip the following code for modules without any binary files
1334 if self.BinaryFileList:
1335 return
1336
1337 ### TODO: How to handle mixed source and binary modules
1338
1339 # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
1340 # Also find all packages that the DynamicEx PCDs depend on
1341 Pcds = []
1342 PatchablePcds = []
1343 Packages = []
1344 PcdCheckList = []
1345 PcdTokenSpaceList = []
1346 for Pcd in self.ModulePcdList + self.LibraryPcdList:
1347 if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
1348 PatchablePcds.append(Pcd)
1349 PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
1350 elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
1351 if Pcd not in Pcds:
1352 Pcds.append(Pcd)
1353 PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
1354 PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
1355 PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
1356 GuidList = OrderedDict(self.GuidList)
1357 for TokenSpace in self.GetGuidsUsedByPcd:
1358 # If a token space is not referred to by a patch PCD or an Ex PCD, remove the GUID from the GUID list
1359 # The GUIDs in the GUIDs section should really be the GUIDs in the source INF or those referred to by Ex and patch PCDs
1360 if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
1361 GuidList.pop(TokenSpace)
1362 CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
1363 for Package in self.DerivedPackageList:
1364 if Package in Packages:
1365 continue
1366 BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
1367 Found = False
1368 for Index in range(len(BeChecked)):
1369 for Item in CheckList[Index]:
1370 if Item in BeChecked[Index]:
1371 Packages.append(Package)
1372 Found = True
1373 break
1374 if Found:
1375 break
1376
1377 VfrPcds = self._GetPcdsMaybeUsedByVfr()
1378 for Pkg in self.PlatformInfo.PackageList:
1379 if Pkg in Packages:
1380 continue
1381 for VfrPcd in VfrPcds:
1382 if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
1383 (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
1384 Packages.append(Pkg)
1385 break
1386
1387 ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
1388 DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
1389 Guid = self.Guid
1390 MDefs = self.Module.Defines
1391
1392 AsBuiltInfDict = {
1393 'module_name' : self.Name,
1394 'module_guid' : Guid,
1395 'module_module_type' : ModuleType,
1396 'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
1397 'pcd_is_driver_string' : [],
1398 'module_uefi_specification_version' : [],
1399 'module_pi_specification_version' : [],
1400 'module_entry_point' : self.Module.ModuleEntryPointList,
1401 'module_unload_image' : self.Module.ModuleUnloadImageList,
1402 'module_constructor' : self.Module.ConstructorList,
1403 'module_destructor' : self.Module.DestructorList,
1404 'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
1405 'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
1406 'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
1407 'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
1408 'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
1409 'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
1410 'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
1411 'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
1412 'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
1413 'module_arch' : self.Arch,
1414 'package_item' : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
1415 'binary_item' : [],
1416 'patchablepcd_item' : [],
1417 'pcd_item' : [],
1418 'protocol_item' : [],
1419 'ppi_item' : [],
1420 'guid_item' : [],
1421 'flags_item' : [],
1422 'libraryclasses_item' : []
1423 }
1424
1425 if 'MODULE_UNI_FILE' in MDefs:
1426 UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
1427 if os.path.isfile(UNIFile):
1428 shutil.copy2(UNIFile, self.OutputDir)
1429
1430 if self.AutoGenVersion > int(gInfSpecVersion, 0):
1431 AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
1432 else:
1433 AsBuiltInfDict['module_inf_version'] = gInfSpecVersion
1434
1435 if DriverType:
1436 AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)
1437
1438 if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
1439 AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
1440 if 'PI_SPECIFICATION_VERSION' in self.Specification:
1441 AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])
1442
1443 OutputDir = self.OutputDir.replace('\\', '/').strip('/')
1444 DebugDir = self.DebugDir.replace('\\', '/').strip('/')
1445 for Item in self.CodaTargetList:
1446 File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
1447 if os.path.isabs(File):
1448 File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
1449 if Item.Target.Ext.lower() == '.aml':
1450 AsBuiltInfDict['binary_item'].append('ASL|' + File)
1451 elif Item.Target.Ext.lower() == '.acpi':
1452 AsBuiltInfDict['binary_item'].append('ACPI|' + File)
1453 elif Item.Target.Ext.lower() == '.efi':
1454 AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
1455 else:
1456 AsBuiltInfDict['binary_item'].append('BIN|' + File)
1457 if not self.DepexGenerated:
1458 DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
1459 if os.path.exists(DepexFile):
1460 self.DepexGenerated = True
1461 if self.DepexGenerated:
1462 if self.ModuleType in [SUP_MODULE_PEIM]:
1463 AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
1464 elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
1465 AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
1466 elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
1467 AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')
1468
1469 Bin = self._GenOffsetBin()
1470 if Bin:
1471 AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)
1472
1473 for Root, Dirs, Files in os.walk(OutputDir):
1474 for File in Files:
1475 if File.lower().endswith('.pdb'):
1476 AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
1477 HeaderComments = self.Module.HeaderComments
1478 StartPos = 0
1479 for Index in range(len(HeaderComments)):
1480 if HeaderComments[Index].find('@BinaryHeader') != -1:
1481 HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
1482 StartPos = Index
1483 break
1484 AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
1485 AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)
1486
1487 GenList = [
1488 (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
1489 (self.PpiList, self._PpiComments, 'ppi_item'),
1490 (GuidList, self._GuidComments, 'guid_item')
1491 ]
1492 for Item in GenList:
1493 for CName in Item[0]:
1494 Comments = '\n '.join(Item[1][CName]) if CName in Item[1] else ''
1495 Entry = Comments + '\n ' + CName if Comments else CName
1496 AsBuiltInfDict[Item[2]].append(Entry)
1497 PatchList = parsePcdInfoFromMapFile(
1498 os.path.join(self.OutputDir, self.Name + '.map'),
1499 os.path.join(self.OutputDir, self.Name + '.efi')
1500 )
1501 if PatchList:
1502 for Pcd in PatchablePcds:
1503 TokenCName = Pcd.TokenCName
1504 for PcdItem in GlobalData.MixedPcd:
1505 if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
1506 TokenCName = PcdItem[0]
1507 break
1508 for PatchPcd in PatchList:
1509 if TokenCName == PatchPcd[0]:
1510 break
1511 else:
1512 continue
1513 PcdValue = ''
1514 if Pcd.DatumType == 'BOOLEAN':
1515 BoolValue = Pcd.DefaultValue.upper()
1516 if BoolValue == 'TRUE':
1517 Pcd.DefaultValue = '1'
1518 elif BoolValue == 'FALSE':
1519 Pcd.DefaultValue = '0'
1520
1521 if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
1522 HexFormat = '0x%02x'
1523 if Pcd.DatumType == TAB_UINT16:
1524 HexFormat = '0x%04x'
1525 elif Pcd.DatumType == TAB_UINT32:
1526 HexFormat = '0x%08x'
1527 elif Pcd.DatumType == TAB_UINT64:
1528 HexFormat = '0x%016x'
1529 PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
1530 else:
1531 if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
1532 EdkLogger.error("build", AUTOGEN_ERROR,
1533 "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
1534 )
1535 ArraySize = int(Pcd.MaxDatumSize, 0)
1536 PcdValue = Pcd.DefaultValue
1537 if PcdValue[0] != '{':
1538 Unicode = False
1539 if PcdValue[0] == 'L':
1540 Unicode = True
1541 PcdValue = PcdValue.lstrip('L')
1542 PcdValue = eval(PcdValue)
1543 NewValue = '{'
1544 for Index in range(0, len(PcdValue)):
1545 if Unicode:
1546 CharVal = ord(PcdValue[Index])
1547 NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
1548 + '0x%02x' % (CharVal >> 8) + ', '
1549 else:
1550 NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
1551 Padding = '0x00, '
1552 if Unicode:
1553 Padding = Padding * 2
1554 ArraySize = ArraySize // 2
1555 if ArraySize < (len(PcdValue) + 1):
1556 if Pcd.MaxSizeUserSet:
1557 EdkLogger.error("build", AUTOGEN_ERROR,
1558 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
1559 )
1560 else:
1561 ArraySize = len(PcdValue) + 1
1562 if ArraySize > len(PcdValue) + 1:
1563 NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
1564 PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
1565 elif len(PcdValue.split(',')) <= ArraySize:
1566 PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
1567 PcdValue += '}'
1568 else:
1569 if Pcd.MaxSizeUserSet:
1570 EdkLogger.error("build", AUTOGEN_ERROR,
1571 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
1572 )
1573 else:
1574 ArraySize = len(PcdValue) + 1
1575 PcdItem = '%s.%s|%s|0x%X' % \
1576 (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
1577 PcdComments = ''
1578 if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
1579 PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
1580 if PcdComments:
1581 PcdItem = PcdComments + '\n ' + PcdItem
1582 AsBuiltInfDict['patchablepcd_item'].append(PcdItem)
1583
1584 for Pcd in Pcds + VfrPcds:
1585 PcdCommentList = []
1586 HiiInfo = ''
1587 TokenCName = Pcd.TokenCName
1588 for PcdItem in GlobalData.MixedPcd:
1589 if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
1590 TokenCName = PcdItem[0]
1591 break
1592 if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
1593 for SkuName in Pcd.SkuInfoList:
1594 SkuInfo = Pcd.SkuInfoList[SkuName]
1595 HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
1596 break
1597 if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
1598 PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
1599 if HiiInfo:
1600 UsageIndex = -1
1601 UsageStr = ''
1602 for Index, Comment in enumerate(PcdCommentList):
1603 for Usage in UsageList:
1604 if Comment.find(Usage) != -1:
1605 UsageStr = Usage
1606 UsageIndex = Index
1607 break
1608 if UsageIndex != -1:
1609 PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
1610 else:
1611 PcdCommentList.append('## UNDEFINED ' + HiiInfo)
1612 PcdComments = '\n '.join(PcdCommentList)
1613 PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
1614 if PcdComments:
1615 PcdEntry = PcdComments + '\n ' + PcdEntry
1616 AsBuiltInfDict['pcd_item'].append(PcdEntry)
1617 for Item in self.BuildOption:
1618 if 'FLAGS' in self.BuildOption[Item]:
1619 AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))
1620
1621 # Generated LibraryClasses section in comments.
1622 for Library in self.LibraryAutoGenList:
1623 AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))
1624
1625 # Generated UserExtensions TianoCore section.
1626 # All tianocore user extensions are copied.
1627 UserExtStr = ''
1628 for TianoCore in self._GetTianoCoreUserExtensionList():
1629 UserExtStr += '\n'.join(TianoCore)
1630 ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
1631 if os.path.isfile(ExtensionFile):
1632 shutil.copy2(ExtensionFile, self.OutputDir)
1633 AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr
1634
1635 # Generated depex expression section in comments.
1636 DepexExpression = self._GetDepexExpresionString()
1637 AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''
1638
1639 AsBuiltInf = TemplateString()
1640 AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))
1641
1642 SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)
1643
1644 self.IsAsBuiltInfCreated = True
1645
1646 def CacheCopyFile(self, DestDir, SourceDir, File):
1647 if os.path.isdir(File):
1648 return
1649
1650 sub_dir = os.path.relpath(File, SourceDir)
1651 destination_file = os.path.join(DestDir, sub_dir)
1652 destination_dir = os.path.dirname(destination_file)
1653 CreateDirectory(destination_dir)
1654 try:
1655 CopyFileOnChange(File, destination_dir)
1656 except:
1657 EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))
1658 return
1659
1660 def CopyModuleToCache(self):
1661 # Find the MakeHashStr and PreMakeHashStr from the latest MakeHashFileList
1662 # and PreMakeHashFileList files
1663 MakeHashStr = None
1664 PreMakeHashStr = None
1665 MakeTimeStamp = 0
1666 PreMakeTimeStamp = 0
1667 Files = [f for f in os.listdir(LongFilePath(self.BuildDir)) if path.isfile(LongFilePath(path.join(self.BuildDir, f)))]
1668 for File in Files:
1669 if ".MakeHashFileList." in File:
1670 # Find the latest file by its time stamp
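# Note: os.stat(...)[8] is ST_MTIME, i.e. the file's last-modification time.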
1671 FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
1672 if FileTimeStamp > MakeTimeStamp:
1673 MakeTimeStamp = FileTimeStamp
1674 MakeHashStr = File.split('.')[-1]
1675 if len(MakeHashStr) != 32:
1676 EdkLogger.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File))
1677 if ".PreMakeHashFileList." in File:
1678 FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
1679 if FileTimeStamp > PreMakeTimeStamp:
1680 PreMakeTimeStamp = FileTimeStamp
1681 PreMakeHashStr = File.split('.')[-1]
1682 if len(PreMakeHashStr) != 32:
1683 EdkLogger.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File))
1684
1685 if not MakeHashStr:
1686 EdkLogger.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
1687 return
1688 if not PreMakeHashStr:
1689 EdkLogger.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
1690 return
1691
1692 # Create Cache destination dirs
1693 FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
1694 FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
1695 CacheFileDir = path.join(FileDir, MakeHashStr)
1696 CacheFfsDir = path.join(FfsDir, MakeHashStr)
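# Illustrative cache layout (path components are examples only):
#   <gBinCacheDest>/<PlatformOutputDir>/<Target>_<ToolChain>/<Arch>/<SourceDir>/<BaseName>/<MakeHashStr>/
#   <gBinCacheDest>/<PlatformOutputDir>/<Target>_<ToolChain>/<TAB_FV_DIRECTORY>/Ffs/<Guid><Name>/<MakeHashStr>/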
1697 CreateDirectory (CacheFileDir)
1698 CreateDirectory (CacheFfsDir)
1699
1700 # Create the ModuleHashPair file so multiple cached versions can coexist
1701 ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
1702 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
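# Illustrative ModuleHashPair JSON content (hash values are placeholders):
#   [["<32-char PreMakeHash>", "<32-char MakeHash>"], ...]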
1703 if os.path.exists(ModuleHashPair):
1704 with open(ModuleHashPair, 'r') as f:
1705 ModuleHashPairList = json.load(f)
1706 if not (PreMakeHashStr, MakeHashStr) in set(map(tuple, ModuleHashPairList)):
1707 ModuleHashPairList.insert(0, (PreMakeHashStr, MakeHashStr))
1708 with open(ModuleHashPair, 'w') as f:
1709 json.dump(ModuleHashPairList, f, indent=2)
1710
1711 # Copy files to Cache destination dirs
1712 if not self.OutputFile:
1713 Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
1714 self.OutputFile = Ma.Binaries
1715 for File in self.OutputFile:
1716 if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):
1717 self.CacheCopyFile(CacheFfsDir, self.FfsOutputDir, File)
1718 else:
1719 if self.Name + ".autogen.hash." in File or \
1720 self.Name + ".autogen.hashchain." in File or \
1721 self.Name + ".hash." in File or \
1722 self.Name + ".hashchain." in File or \
1723 self.Name + ".PreMakeHashFileList." in File or \
1724 self.Name + ".MakeHashFileList." in File:
1725 self.CacheCopyFile(FileDir, self.BuildDir, File)
1726 else:
1727 self.CacheCopyFile(CacheFileDir, self.BuildDir, File)
1728 ## Create makefile for the module and its dependent libraries
1729 #
1730 # @param CreateLibraryMakeFile Flag indicating whether or not the makefiles of
1731 # dependent libraries will be created
1732 #
1733 @cached_class_function
1734 def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):
1735
1736 # Nest this function inside its only caller.
1737 def CreateTimeStamp():
1738 FileSet = {self.MetaFile.Path}
1739
1740 for SourceFile in self.Module.Sources:
1741 FileSet.add (SourceFile.Path)
1742
1743 for Lib in self.DependentLibraryList:
1744 FileSet.add (Lib.MetaFile.Path)
1745
1746 for f in self.AutoGenDepSet:
1747 FileSet.add (f.Path)
1748
1749 if os.path.exists (self.TimeStampPath):
1750 os.remove (self.TimeStampPath)
1751
1752 SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)
1753
1754 # Ignore generating makefile when it is a binary module
1755 if self.IsBinaryModule:
1756 return
1757
1758 self.GenFfsList = GenFfsList
1759
1760 if not self.IsLibrary and CreateLibraryMakeFile:
1761 for LibraryAutoGen in self.LibraryAutoGenList:
1762 LibraryAutoGen.CreateMakeFile()
1763
1764 # CanSkip uses timestamps to determine build skipping
1765 if self.CanSkip():
1766 return
1767
1768 if len(self.CustomMakefile) == 0:
1769 Makefile = GenMake.ModuleMakefile(self)
1770 else:
1771 Makefile = GenMake.CustomMakefile(self)
1772 if Makefile.Generate():
1773 EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
1774 (self.Name, self.Arch))
1775 else:
1776 EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
1777 (self.Name, self.Arch))
1778
1779 CreateTimeStamp()
1780
1781 MakefileType = Makefile._FileType
1782 MakefileName = Makefile._FILE_NAME_[MakefileType]
1783 MakefilePath = os.path.join(self.MakeFileDir, MakefileName)
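# Record the path of the generated makefile in <Name>.makefile; GenCMakeHash later
# reads this small file to locate the makefile whose content it hashes.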
1784 FilePath = path.join(self.BuildDir, self.Name + ".makefile")
1785 SaveFileOnChange(FilePath, MakefilePath, False)
1786
1787 def CopyBinaryFiles(self):
1788 for File in self.Module.Binaries:
1789 SrcPath = File.Path
1790 DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))
1791 CopyLongFilePath(SrcPath, DstPath)
1792 ## Create autogen code for the module and its dependent libraries
1793 #
1794 # @param CreateLibraryCodeFile Flag indicating whether or not the code of
1795 # dependent libraries will be created
1796 #
1797 def CreateCodeFile(self, CreateLibraryCodeFile=True):
1798
1799 if self.IsCodeFileCreated:
1800 return
1801
1802 # Need to generate the PcdDatabase even if the PcdDriver is a binary module
1803 if self.IsBinaryModule and self.PcdIsDriver != '':
1804 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
1805 return
1806 if self.IsBinaryModule:
1807 if self.IsLibrary:
1808 self.CopyBinaryFiles()
1809 return
1810
1811 if not self.IsLibrary and CreateLibraryCodeFile:
1812 for LibraryAutoGen in self.LibraryAutoGenList:
1813 LibraryAutoGen.CreateCodeFile()
1814
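# Touching the cached LibraryAutoGenList property here forces the library AutoGen
# objects to be created (and their build rules applied) before AutoGen file generation.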
1815 self.LibraryAutoGenList
1816 AutoGenList = []
1817 IgoredAutoGenList = []
1818
1819 for File in self.AutoGenFileList:
1820 if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
1821 AutoGenList.append(str(File))
1822 else:
1823 IgoredAutoGenList.append(str(File))
1824
1825
1826 for ModuleType in self.DepexList:
1827 # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
1828 if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
1829 continue
1830
1831 Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
1832 DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}
1833
1834 if len(Dpx.PostfixNotation) != 0:
1835 self.DepexGenerated = True
1836
1837 if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
1838 AutoGenList.append(str(DpxFile))
1839 else:
1840 IgoredAutoGenList.append(str(DpxFile))
1841
1842 if IgoredAutoGenList == []:
1843 EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
1844 (" ".join(AutoGenList), self.Name, self.Arch))
1845 elif AutoGenList == []:
1846 EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
1847 (" ".join(IgoredAutoGenList), self.Name, self.Arch))
1848 else:
1849 EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
1850 (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))
1851
1852 self.IsCodeFileCreated = True
1853
1854 return AutoGenList
1855
1856 ## Summarize the ModuleAutoGen objects of all libraries used by this module
1857 @cached_property
1858 def LibraryAutoGenList(self):
1859 RetVal = []
1860 for Library in self.DependentLibraryList:
1861 La = ModuleAutoGen(
1862 self.Workspace,
1863 Library.MetaFile,
1864 self.BuildTarget,
1865 self.ToolChain,
1866 self.Arch,
1867 self.PlatformInfo.MetaFile,
1868 self.DataPipe
1869 )
1870 La.IsLibrary = True
1871 if La not in RetVal:
1872 RetVal.append(La)
1873 for Lib in La.CodaTargetList:
1874 self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
1875 return RetVal
1876
1877 def GenCMakeHash(self):
1878 # GenCMakeHash can only be called with --binary-destination.
1879 # It is never called in multiprocessing and always saves its result directly in the main process,
1880 # so no remote dict is needed to share the gCMakeHashFile result with the main process.
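# The result is saved as <Name>.autogen.hashchain.<md5>; its JSON body is a list of
# [path, md5] pairs, e.g. (illustrative) [["AutoGen.c", "<32-char md5>"], ...].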
1881
1882 DependencyFileSet = set()
1883 # Add AutoGen files
1884 if self.AutoGenFileList:
1885 for File in set(self.AutoGenFileList):
1886 DependencyFileSet.add(File)
1887
1888 # Add Makefile
1889 abspath = path.join(self.BuildDir, self.Name + ".makefile")
1890 try:
1891 with open(LongFilePath(abspath),"r") as fd:
1892 lines = fd.readlines()
1893 except Exception as e:
1894 EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
1895 if lines:
1896 DependencyFileSet.update(lines)
1897
1898 # Calculate the hash of all the dependency files above
1899 # Initialize the hash object
1900 FileList = []
1901 m = hashlib.md5()
1902 for File in sorted(DependencyFileSet, key=lambda x: str(x)):
1903 if not path.exists(LongFilePath(str(File))):
1904 EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
1905 continue
1906 with open(LongFilePath(str(File)), 'rb') as f:
1907 Content = f.read()
1908 m.update(Content)
1909 FileList.append((str(File), hashlib.md5(Content).hexdigest()))
1910
1911 HashChainFile = path.join(self.BuildDir, self.Name + ".autogen.hashchain." + m.hexdigest())
1912 GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
1913 try:
1914 with open(LongFilePath(HashChainFile), 'w') as f:
1915 json.dump(FileList, f, indent=2)
1916 except:
1917 EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
1918 return False
1919
1920 def GenModuleHash(self):
1921 # GenModuleHash is only called after the autogen phase.
1922 # It is never called in multiprocessing and always saves its result directly in the main process,
1923 # so no remote dict is needed to share the gModuleHashFile result with the main process.
1924 #
1925 # GenModuleHash consumes no dict.
1926 # GenModuleHash produces the local gModuleHashFile dict.
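# The result is saved as <Name>.hashchain.<md5>; its JSON body is a list of
# [path, md5] pairs covering the INF, the source files and the headers from deps.txt.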
1927
1928 DependencyFileSet = set()
1929 # Add Module Meta file
1930 DependencyFileSet.add(self.MetaFile.Path)
1931
1932 # Add Module's source files
1933 if self.SourceFileList:
1934 for File in set(self.SourceFileList):
1935 DependencyFileSet.add(File.Path)
1936
1937 # Add the module's included header files
1938 # Directly use the deps.txt file in the module BuildDir
1939 abspath = path.join(self.BuildDir, "deps.txt")
1940 rt = None
1941 try:
1942 with open(LongFilePath(abspath),"r") as fd:
1943 lines = fd.readlines()
1944 if lines:
1945 rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])
1946 except Exception as e:
1947 EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
1948
1949 if rt:
1950 DependencyFileSet.update(rt)
1951
1952
1953 # Calculate the hash of all the dependency files above
1954 # Initialize the hash object
1955 FileList = []
1956 m = hashlib.md5()
1957 BuildDirStr = path.abspath(self.BuildDir).lower()
1958 for File in sorted(DependencyFileSet, key=lambda x: str(x)):
1959 # Skip the AutoGen files in BuildDir which have already been
1960 # included in the .autogen.hash. file
1961 if BuildDirStr in path.abspath(File).lower():
1962 continue
1963 if not path.exists(LongFilePath(File)):
1964 EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
1965 continue
1966 with open(LongFilePath(File), 'rb') as f:
1967 Content = f.read()
1968 m.update(Content)
1969 FileList.append((File, hashlib.md5(Content).hexdigest()))
1970
1971 HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())
1972 GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
1973 try:
1974 with open(LongFilePath(HashChainFile), 'w') as f:
1975 json.dump(FileList, f, indent=2)
1976 except:
1977 EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
1978 return False
1979
1980 def GenPreMakefileHashList(self):
1981 # GenPreMakefileHashList consumes the dicts below:
1982 # gPlatformHashFile
1983 # gPackageHashFile
1984 # gModuleHashFile
1985 # GenPreMakefileHashList produces no dict.
1986 # gModuleHashFile items might be produced in multiprocessing, so the
1987 # remote gModuleHashFile dict needs to be checked
1988
1989 # skip binary module
1990 if self.IsBinaryModule:
1991 return
1992
1993 FileList = []
1994 m = hashlib.md5()
1995 # Add Platform level hash
1996 HashFile = GlobalData.gPlatformHashFile
1997 if path.exists(LongFilePath(HashFile)):
1998 FileList.append(HashFile)
1999 m.update(HashFile.encode('utf-8'))
2000 else:
2001 EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)
2002
2003 # Add Package level hash
2004 if self.DependentPackageList:
2005 for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
2006 if not (Pkg.PackageName, Pkg.Arch) in GlobalData.gPackageHashFile:
2007 EdkLogger.quiet("[cache warning]:No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))
2008 continue
2009 HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]
2010 if path.exists(LongFilePath(HashFile)):
2011 FileList.append(HashFile)
2012 m.update(HashFile.encode('utf-8'))
2013 else:
2014 EdkLogger.quiet("[cache warning]:No Package HashFile: %s" % HashFile)
2015
2016 # Add Module self
2017 # GenPreMakefileHashList is needed for both --binary-destination
2018 # and --hash. And --hash might save the ModuleHashFile in the remote dict
2019 # during multiprocessing.
2020 if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
2021 HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
2022 else:
2023 EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2024 if path.exists(LongFilePath(HashFile)):
2025 FileList.append(HashFile)
2026 m.update(HashFile.encode('utf-8'))
2027 else:
2028 EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)
2029
2030 # Add Library hash
2031 if self.LibraryAutoGenList:
2032 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
2033
2034 if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
2035 HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
2036 else:
2037 EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
2038 if path.exists(LongFilePath(HashFile)):
2039 FileList.append(HashFile)
2040 m.update(HashFile.encode('utf-8'))
2041 else:
2042 EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)
2043
2044 # Save PreMakeHashFileList
2045 FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())
2046 try:
2047 with open(LongFilePath(FilePath), 'w') as f:
2048 json.dump(FileList, f, indent=0)
2049 except:
2050 EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)
2051
2052 def GenMakefileHashList(self):
2053 # GenMakefileHashList is only needed with --binary-destination, which keeps
2054 # everything in local dicts, so there is no need to check remote dicts.
2055
2056 # skip binary module
2057 if self.IsBinaryModule:
2058 return
2059
2060 FileList = []
2061 m = hashlib.md5()
2062 # Add AutoGen hash
2063 HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]
2064 if path.exists(LongFilePath(HashFile)):
2065 FileList.append(HashFile)
2066 m.update(HashFile.encode('utf-8'))
2067 else:
2068 EdkLogger.quiet("[cache warning]:No AutoGen HashFile: %s" % HashFile)
2069
2070 # Add Module self
2071 if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
2072 HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
2073 else:
2074 EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
2075 if path.exists(LongFilePath(HashFile)):
2076 FileList.append(HashFile)
2077 m.update(HashFile.encode('utf-8'))
2078 else:
2079 EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)
2080
2081 # Add Library hash
2082 if self.LibraryAutoGenList:
2083 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
2084 if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
2085 HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
2086 else:
2087 EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
2088 if path.exists(LongFilePath(HashFile)):
2089 FileList.append(HashFile)
2090 m.update(HashFile.encode('utf-8'))
2091 else:
2092 EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)
2093
2094 # Save MakeHashFileList
2095 FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())
2096 try:
2097 with open(LongFilePath(FilePath), 'w') as f:
2098 json.dump(FileList, f, indent=0)
2099 except:
2100 EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)
2101
2102 def CheckHashChainFile(self, HashChainFile):
2103 # Assume the HashChainFile basename format is 'x.hashchain.16BytesHexStr',
2104 # where x is the module name and 16BytesHexStr is the md5 hexdigest of
2105 # the content of all the files in the hash chain
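# Illustrative name (placeholder hash): MyModule.hashchain.0123456789abcdef0123456789abcdef
# The JSON body is a list of [SourceFilePath, md5] pairs that are re-verified below.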
2106 HashStr = HashChainFile.split('.')[-1]
2107 if len(HashStr) != 32:
2108 EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % (File))
2109 return False
2110
2111 try:
2112 with open(LongFilePath(HashChainFile), 'r') as f:
2113 HashChainList = json.load(f)
2114 except:
2115 EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)
2116 return False
2117
2118 # Print the different file info
2119 # print(HashChainFile)
2120 for idx, (SrcFile, SrcHash) in enumerate (HashChainList):
2121 if SrcFile in GlobalData.gFileHashDict:
2122 DestHash = GlobalData.gFileHashDict[SrcFile]
2123 else:
2124 try:
2125 with open(LongFilePath(SrcFile), 'rb') as f:
2126 Content = f.read()
2127 DestHash = hashlib.md5(Content).hexdigest()
2128 GlobalData.gFileHashDict[SrcFile] = DestHash
2129 except IOError as X:
2130 # Cache miss if SrcFile was removed in the new version of the code
2131 GlobalData.gFileHashDict[SrcFile] = 0
2132 EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
2133 return False
2134 if SrcHash != DestHash:
2135 EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
2136 return False
2137
2138 return True
2139
2140 ## Decide whether we can skip the left autogen and make process
2141 def CanSkipbyMakeCache(self):
2142 # For --binary-source only
2143 # CanSkipbyMakeCache consumes the dicts below:
2144 # gModuleMakeCacheStatus
2145 # gHashChainStatus
2146 # CanSkipbyMakeCache produces the gModuleMakeCacheStatus and gModuleHashFile dicts.
2147 # All of these dicts might be produced in multiprocessing, so
2148 # these remote dicts need to be checked
2149
2150 if not GlobalData.gBinCacheSource:
2151 return False
2152
2153 if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:
2154 return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]
2155
2156 # If the module is binary, which has a special build rule, do not skip by cache.
2157 if self.IsBinaryModule:
2158 print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
2159 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2160 return False
2161
2162 # Treat .inc as a binary file; do not skip by hash
2163 for f_ext in self.SourceFileList:
2164 if '.inc' in str(f_ext):
2165 print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
2166 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2167 return False
2168
2169 ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2170 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
2171
2172 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2173 ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
2174 try:
2175 with open(LongFilePath(ModuleHashPair), 'r') as f:
2176 ModuleHashPairList = json.load(f)
2177 except:
2178 # ModuleHashPair might not exist for a newly added module
2179 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2180 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
2181 print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
2182 return False
2183
2184 # Check the PreMakeHash in ModuleHashPairList one by one
2185 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2186 SourceHashDir = path.join(ModuleCacheDir, MakeHash)
2187 SourceFfsHashDir = path.join(FfsDir, MakeHash)
2188 PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
2189 MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)
2190
2191 try:
2192 with open(LongFilePath(MakeHashFileList_FilePah), 'r') as f:
2193 MakeHashFileList = json.load(f)
2194 except:
2195 EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePah)
2196 continue
2197
2198 HashMiss = False
2199 for HashChainFile in MakeHashFileList:
2200 HashChainStatus = None
2201 if HashChainFile in GlobalData.gHashChainStatus:
2202 HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
2203 if HashChainStatus == False:
2204 HashMiss = True
2205 break
2206 elif HashChainStatus == True:
2207 continue
2208 # Convert to a path starting with the cache source dir
2209 RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
2210 NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
2211 if self.CheckHashChainFile(NewFilePath):
2212 GlobalData.gHashChainStatus[HashChainFile] = True
2213 # Save the module's own HashFile for later use by GenPreMakefileHashList
2214 if self.Name + ".hashchain." in HashChainFile:
2215 GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
2216 else:
2217 GlobalData.gHashChainStatus[HashChainFile] = False
2218 HashMiss = True
2219 break
2220
2221 if HashMiss:
2222 continue
2223
2224 # Make cache hit, restore the module build result
2225 for root, dir, files in os.walk(SourceHashDir):
2226 for f in files:
2227 File = path.join(root, f)
2228 self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
2229 if os.path.exists(SourceFfsHashDir):
2230 for root, dir, files in os.walk(SourceFfsHashDir):
2231 for f in files:
2232 File = path.join(root, f)
2233 self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)
2234
2235 if self.Name == "PcdPeim" or self.Name == "PcdDxe":
2236 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
2237
2238 print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)
2239 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
2240 return True
2241
2242 print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
2243 GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2244 return False
2245
2246 ## Decide whether we can skip the left autogen and make process
2247 def CanSkipbyPreMakeCache(self):
2248 # CanSkipbyPreMakeCache consumes the dicts below:
2249 # gModulePreMakeCacheStatus
2250 # gHashChainStatus
2251 # gModuleHashFile
2252 # CanSkipbyPreMakeCache produces the gModulePreMakeCacheStatus dict.
2253 # All of these dicts might be produced in multiprocessing, so
2254 # these remote dicts need to be checked
2255
2256 if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:
2257 return False
2258
2259 if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:
2260 return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]
2261
2262 # If the module is binary, which has a special build rule, do not skip by cache.
2263 if self.IsBinaryModule:
2264 print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
2265 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2266 return False
2267
2268 # Treat .inc as a binary file; do not skip by hash
2269 for f_ext in self.SourceFileList:
2270 if '.inc' in str(f_ext):
2271 print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
2272 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2273 return False
2274
2275 # For --hash only in the incremental build
2276 if not GlobalData.gBinCacheSource:
2277 Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]
2278 PreMakeHashFileList_FilePah = None
2279 MakeTimeStamp = 0
2280 # Find latest PreMakeHashFileList file in self.BuildDir folder
2281 for File in Files:
2282 if ".PreMakeHashFileList." in File:
2283 FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]
2284 if FileTimeStamp > MakeTimeStamp:
2285 MakeTimeStamp = FileTimeStamp
2286 PreMakeHashFileList_FilePah = File
2287 if not PreMakeHashFileList_FilePah:
2288 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2289 return False
2290
2291 try:
2292 with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:
2293 PreMakeHashFileList = json.load(f)
2294 except:
2295 EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
2296 print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
2297 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2298 return False
2299
2300 HashMiss = False
2301 for HashChainFile in PreMakeHashFileList:
2302 HashChainStatus = None
2303 if HashChainFile in GlobalData.gHashChainStatus:
2304 HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
2305 if HashChainStatus == False:
2306 HashMiss = True
2307 break
2308 elif HashChainStatus == True:
2309 continue
2310 if self.CheckHashChainFile(HashChainFile):
2311 GlobalData.gHashChainStatus[HashChainFile] = True
2312 # Save the module self HashFile for GenPreMakefileHashList later usage
2313 if self.Name + ".hashchain." in HashChainFile:
2314 GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
2315 else:
2316 GlobalData.gHashChainStatus[HashChainFile] = False
2317 HashMiss = True
2318 break
2319
2320 if HashMiss:
2321 print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
2322 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2323 return False
2324 else:
2325 print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
2326 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
2327 return True
2328
2329 ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
2330 FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
2331
2332 ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2333 ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
2334 try:
2335 with open(LongFilePath(ModuleHashPair), 'r') as f:
2336 ModuleHashPairList = json.load(f)
2337 except:
2338 # ModuleHashPair might not exist for a newly added module
2339 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2340 EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
2341 print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
2342 return False
2343
2344 # Check the PreMakeHash in ModuleHashPairList one by one
2345 for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
2346 SourceHashDir = path.join(ModuleCacheDir, MakeHash)
2347 SourceFfsHashDir = path.join(FfsDir, MakeHash)
2348 PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
2349 MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)
2350
2351 try:
2352 with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:
2353 PreMakeHashFileList = json.load(f)
2354 except:
2355 EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
2356 continue
2357
2358 HashMiss = False
2359 for HashChainFile in PreMakeHashFileList:
2360 HashChainStatus = None
2361 if HashChainFile in GlobalData.gHashChainStatus:
2362 HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
2363 if HashChainStatus == False:
2364 HashMiss = True
2365 break
2366 elif HashChainStatus == True:
2367 continue
2368 # Convert to a path starting with the cache source dir
2369 RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
2370 NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
2371 if self.CheckHashChainFile(NewFilePath):
2372 GlobalData.gHashChainStatus[HashChainFile] = True
2373 else:
2374 GlobalData.gHashChainStatus[HashChainFile] = False
2375 HashMiss = True
2376 break
2377
2378 if HashMiss:
2379 continue
2380
2381 # PreMakefile cache hit, restore the module build result
2382 for root, dir, files in os.walk(SourceHashDir):
2383 for f in files:
2384 File = path.join(root, f)
2385 self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
2386 if os.path.exists(SourceFfsHashDir):
2387 for root, dir, files in os.walk(SourceFfsHashDir):
2388 for f in files:
2389 File = path.join(root, f)
2390 self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)
2391
2392 if self.Name == "PcdPeim" or self.Name == "PcdDxe":
2393 CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
2394
2395 print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
2396 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
2397 return True
2398
2399 print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
2400 GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
2401 return False
2402
2403 ## Decide whether we can skip the Module build
2404 def CanSkipbyCache(self, gHitSet):
2405 # Hashing feature is off
2406 if not GlobalData.gBinCacheSource:
2407 return False
2408
2409 if self in gHitSet:
2410 return True
2411
2412 return False
2413
2414 ## Decide whether we can skip the ModuleAutoGen process
2415 # If any source file is newer than the module then we cannot skip
2416 #
2417 def CanSkip(self):
2418 # Don't skip if the cache feature is enabled
2419 if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
2420 return False
2421 if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
2422 return True
2423 if not os.path.exists(self.TimeStampPath):
2424 return False
2425 # Last creation time of the module
2426 DstTimeStamp = os.stat(self.TimeStampPath)[8]
2427
2428 SrcTimeStamp = self.Workspace._SrcTimeStamp
2429 if SrcTimeStamp > DstTimeStamp:
2430 return False
2431
2432 with open(self.TimeStampPath,'r') as f:
2433 for source in f:
2434 source = source.rstrip('\n')
2435 if not os.path.exists(source):
2436 return False
2437 if source not in ModuleAutoGen.TimeDict :
2438 ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
2439 if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
2440 return False
2441 GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
2442 return True
2443
2444 @cached_property
2445 def TimeStampPath(self):
2446 return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')