2 # Create makefile for MS nmake and GNU make
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
7 from __future__
import absolute_import
8 from AutoGen
.AutoGen
import AutoGen
9 from Common
.LongFilePathSupport
import CopyLongFilePath
10 from Common
.BuildToolError
import *
11 from Common
.DataType
import *
12 from Common
.Misc
import *
13 from Common
.StringUtils
import NormPath
,GetSplitList
14 from collections
import defaultdict
15 from Workspace
.WorkspaceCommon
import OrderedListDict
16 import os
.path
as path
19 from . import InfSectionParser
22 from . import GenDepex
23 from io
import BytesIO
24 from GenPatchPcdTable
.GenPatchPcdTable
import parsePcdInfoFromMapFile
25 from Workspace
.MetaFileCommentParser
import UsageList
26 from .GenPcdDb
import CreatePcdDatabaseCode
27 from Common
.caching
import cached_class_function
28 from AutoGen
.ModuleAutoGenHelper
import PlatformInfo
,WorkSpaceInfo
29 from AutoGen
.CacheIR
import ModuleBuildCacheIR
## Mapping Makefile type
#  Maps a compiler family tag to the make program flavor used to drive it.
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}

# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
# is the former use /I , the Latter used -I to specify include directories
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

## default file name for AutoGen
#  The "%(module_name)s" placeholder is substituted with the module's base
#  name when the concrete file name is produced.
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"

# INF specification version written into generated as-built INF files.
gInfSpecVersion = "0x00010017"
# Match name = variable
# NOTE: raw string literals are used so that "\s" and "\w" are handed to the
# regex engine as-is; in a plain string they are invalid Python escape
# sequences (DeprecationWarning today, SyntaxError in future versions).
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")

# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
63 # Template string to generic AsBuilt INF
65 gAsBuiltInfHeaderString
= TemplateString("""${header_comments}
71 INF_VERSION = ${module_inf_version}
72 BASE_NAME = ${module_name}
73 FILE_GUID = ${module_guid}
74 MODULE_TYPE = ${module_module_type}${BEGIN}
75 VERSION_STRING = ${module_version_string}${END}${BEGIN}
76 PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
77 UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
78 PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
79 ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
80 UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
81 CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
82 DESTRUCTOR = ${module_destructor}${END}${BEGIN}
83 SHADOW = ${module_shadow}${END}${BEGIN}
84 PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
85 PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
86 PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
87 PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
88 BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
89 SPEC = ${module_spec}${END}${BEGIN}
90 UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
91 MODULE_UNI_FILE = ${module_uni_file}${END}
93 [Packages.${module_arch}]${BEGIN}
96 [Binaries.${module_arch}]${BEGIN}
99 [PatchPcd.${module_arch}]${BEGIN}
103 [Protocols.${module_arch}]${BEGIN}
107 [Ppis.${module_arch}]${BEGIN}
111 [Guids.${module_arch}]${BEGIN}
115 [PcdEx.${module_arch}]${BEGIN}
119 [LibraryClasses.${module_arch}]
120 ## @LIB_INSTANCES${BEGIN}
121 # ${libraryclasses_item}${END}
125 ${userextension_tianocore_item}
129 [BuildOptions.${module_arch}]
131 ## ${flags_item}${END}
134 # extend lists contained in a dictionary with lists stored in another dictionary
135 # if CopyToDict is not derived from DefaultDict(list) then this may raise exception
# extend lists contained in a dictionary with lists stored in another dictionary
# if CopyToDict is not derived from DefaultDict(list) then this may raise exception
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    """Merge every list value of CopyFromDict into CopyToDict in place.

    CopyToDict is expected to behave like defaultdict(list); a plain dict
    that lacks one of the source keys will raise KeyError on lookup.
    """
    for SourceKey, SourceItems in CopyFromDict.items():
        CopyToDict[SourceKey].extend(SourceItems)
141 # Create a directory specified by a set of path elements and return the full path
142 def _MakeDir(PathList
):
143 RetVal
= path
.join(*PathList
)
144 CreateDirectory(RetVal
)
148 # Convert string to C format array
150 def _ConvertStringToByteArray(Value
):
151 Value
= Value
.strip()
155 if not Value
.endswith('}'):
157 Value
= Value
.replace(' ', '').replace('{', '').replace('}', '')
158 ValFields
= Value
.split(',')
160 for Index
in range(len(ValFields
)):
161 ValFields
[Index
] = str(int(ValFields
[Index
], 0))
164 Value
= '{' + ','.join(ValFields
) + '}'
168 if Value
.startswith('L"'):
169 if not Value
.endswith('"'):
173 elif not Value
.startswith('"') or not Value
.endswith('"'):
176 Value
= eval(Value
) # translate escape character
178 for Index
in range(0, len(Value
)):
180 NewValue
= NewValue
+ str(ord(Value
[Index
]) % 0x10000) + ','
182 NewValue
= NewValue
+ str(ord(Value
[Index
]) % 0x100) + ','
183 Value
= NewValue
+ '0}'
186 ## ModuleAutoGen class
188 # This class encapsules the AutoGen behaviors for the build tools. In addition to
189 # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
190 # to the [depex] section in module's inf file.
192 class ModuleAutoGen(AutoGen
):
193 # call super().__init__ then call the worker function with different parameter count
194 def __init__(self
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
):
195 if not hasattr(self
, "_Init"):
196 self
._InitWorker
(Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
)
199 ## Cache the timestamps of metafiles of every module in a class attribute
203 def __new__(cls
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
):
204 # check if this module is employed by active platform
205 if not PlatformInfo(Workspace
, args
[0], Target
, Toolchain
, Arch
,args
[-1]).ValidModule(MetaFile
):
206 EdkLogger
.verbose("Module [%s] for [%s] is not employed by active platform\n" \
209 return super(ModuleAutoGen
, cls
).__new
__(cls
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
)
211 ## Initialize ModuleAutoGen
213 # @param Workspace EdkIIWorkspaceBuild object
214 # @param ModuleFile The path of module file
215 # @param Target Build target (DEBUG, RELEASE)
216 # @param Toolchain Name of tool chain
217 # @param Arch The arch the module supports
218 # @param PlatformFile Platform meta-file
220 def _InitWorker(self
, Workspace
, ModuleFile
, Target
, Toolchain
, Arch
, PlatformFile
,DataPipe
):
221 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "AutoGen module [%s] [%s]" % (ModuleFile
, Arch
))
222 GlobalData
.gProcessingFile
= "%s [%s, %s, %s]" % (ModuleFile
, Arch
, Toolchain
, Target
)
224 self
.Workspace
= Workspace
225 self
.WorkspaceDir
= ""
226 self
.PlatformInfo
= None
227 self
.DataPipe
= DataPipe
228 self
.__init
_platform
_info
__()
229 self
.MetaFile
= ModuleFile
230 self
.SourceDir
= self
.MetaFile
.SubDir
231 self
.SourceDir
= mws
.relpath(self
.SourceDir
, self
.WorkspaceDir
)
233 self
.ToolChain
= Toolchain
234 self
.BuildTarget
= Target
236 self
.ToolChainFamily
= self
.PlatformInfo
.ToolChainFamily
237 self
.BuildRuleFamily
= self
.PlatformInfo
.BuildRuleFamily
239 self
.IsCodeFileCreated
= False
240 self
.IsAsBuiltInfCreated
= False
241 self
.DepexGenerated
= False
243 self
.BuildDatabase
= self
.Workspace
.BuildDatabase
244 self
.BuildRuleOrder
= None
247 self
._GuidComments
= OrderedListDict()
248 self
._ProtocolComments
= OrderedListDict()
249 self
._PpiComments
= OrderedListDict()
250 self
._BuildTargets
= None
251 self
._IntroBuildTargetList
= None
252 self
._FinalBuildTargetList
= None
253 self
._FileTypes
= None
255 self
.AutoGenDepSet
= set()
256 self
.ReferenceModules
= []
259 self
.FileDependCache
= {}
261 def __init_platform_info__(self
):
262 pinfo
= self
.DataPipe
.Get("P_Info")
263 self
.WorkspaceDir
= pinfo
.get("WorkspaceDir")
264 self
.PlatformInfo
= PlatformInfo(self
.Workspace
,pinfo
.get("ActivePlatform"),pinfo
.get("Target"),pinfo
.get("ToolChain"),pinfo
.get("Arch"),self
.DataPipe
)
265 ## hash() operator of ModuleAutoGen
267 # The module file path and arch string will be used to represent
268 # hash value of this object
270 # @retval int Hash value of the module file path and arch
272 @cached_class_function
274 return hash((self
.MetaFile
, self
.Arch
))
276 return "%s [%s]" % (self
.MetaFile
, self
.Arch
)
278 # Get FixedAtBuild Pcds of this Module
280 def FixedAtBuildPcds(self
):
282 for Pcd
in self
.ModulePcdList
:
283 if Pcd
.Type
!= TAB_PCDS_FIXED_AT_BUILD
:
285 if Pcd
not in RetVal
:
290 def FixedVoidTypePcds(self
):
292 for Pcd
in self
.FixedAtBuildPcds
:
293 if Pcd
.DatumType
== TAB_VOID
:
294 if '.'.join((Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
)) not in RetVal
:
295 RetVal
['.'.join((Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
))] = Pcd
.DefaultValue
299 def UniqueBaseName(self
):
300 ModuleNames
= self
.DataPipe
.Get("M_Name")
303 return ModuleNames
.get((self
.Name
,self
.MetaFile
),self
.Name
)
305 # Macros could be used in build_rule.txt (also Makefile)
309 ("WORKSPACE" ,self
.WorkspaceDir
),
310 ("MODULE_NAME" ,self
.Name
),
311 ("MODULE_NAME_GUID" ,self
.UniqueBaseName
),
312 ("MODULE_GUID" ,self
.Guid
),
313 ("MODULE_VERSION" ,self
.Version
),
314 ("MODULE_TYPE" ,self
.ModuleType
),
315 ("MODULE_FILE" ,str(self
.MetaFile
)),
316 ("MODULE_FILE_BASE_NAME" ,self
.MetaFile
.BaseName
),
317 ("MODULE_RELATIVE_DIR" ,self
.SourceDir
),
318 ("MODULE_DIR" ,self
.SourceDir
),
319 ("BASE_NAME" ,self
.Name
),
321 ("TOOLCHAIN" ,self
.ToolChain
),
322 ("TOOLCHAIN_TAG" ,self
.ToolChain
),
323 ("TOOL_CHAIN_TAG" ,self
.ToolChain
),
324 ("TARGET" ,self
.BuildTarget
),
325 ("BUILD_DIR" ,self
.PlatformInfo
.BuildDir
),
326 ("BIN_DIR" ,os
.path
.join(self
.PlatformInfo
.BuildDir
, self
.Arch
)),
327 ("LIB_DIR" ,os
.path
.join(self
.PlatformInfo
.BuildDir
, self
.Arch
)),
328 ("MODULE_BUILD_DIR" ,self
.BuildDir
),
329 ("OUTPUT_DIR" ,self
.OutputDir
),
330 ("DEBUG_DIR" ,self
.DebugDir
),
331 ("DEST_DIR_OUTPUT" ,self
.OutputDir
),
332 ("DEST_DIR_DEBUG" ,self
.DebugDir
),
333 ("PLATFORM_NAME" ,self
.PlatformInfo
.Name
),
334 ("PLATFORM_GUID" ,self
.PlatformInfo
.Guid
),
335 ("PLATFORM_VERSION" ,self
.PlatformInfo
.Version
),
336 ("PLATFORM_RELATIVE_DIR" ,self
.PlatformInfo
.SourceDir
),
337 ("PLATFORM_DIR" ,mws
.join(self
.WorkspaceDir
, self
.PlatformInfo
.SourceDir
)),
338 ("PLATFORM_OUTPUT_DIR" ,self
.PlatformInfo
.OutputDir
),
339 ("FFS_OUTPUT_DIR" ,self
.FfsOutputDir
)
342 ## Return the module build data object
345 return self
.BuildDatabase
[self
.MetaFile
, self
.Arch
, self
.BuildTarget
, self
.ToolChain
]
347 ## Return the module name
350 return self
.Module
.BaseName
352 ## Return the module DxsFile if exist
355 return self
.Module
.DxsFile
357 ## Return the module meta-file GUID
361 # To build same module more than once, the module path with FILE_GUID overridden has
362 # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
363 # in DSC. The overridden GUID can be retrieved from file name
365 if os
.path
.basename(self
.MetaFile
.File
) != os
.path
.basename(self
.MetaFile
.Path
):
367 # Length of GUID is 36
369 return os
.path
.basename(self
.MetaFile
.Path
)[:36]
370 return self
.Module
.Guid
372 ## Return the module version
375 return self
.Module
.Version
377 ## Return the module type
379 def ModuleType(self
):
380 return self
.Module
.ModuleType
382 ## Return the component type (for Edk.x style of module)
384 def ComponentType(self
):
385 return self
.Module
.ComponentType
387 ## Return the build type
390 return self
.Module
.BuildType
392 ## Return the PCD_IS_DRIVER setting
394 def PcdIsDriver(self
):
395 return self
.Module
.PcdIsDriver
397 ## Return the autogen version, i.e. module meta-file version
399 def AutoGenVersion(self
):
400 return self
.Module
.AutoGenVersion
402 ## Check if the module is library or not
405 return bool(self
.Module
.LibraryClass
)
407 ## Check if the module is binary module or not
409 def IsBinaryModule(self
):
410 return self
.Module
.IsBinaryModule
412 ## Return the directory to store intermediate files of the module
416 self
.PlatformInfo
.BuildDir
,
419 self
.MetaFile
.BaseName
422 ## Return the directory to store the intermediate object files of the module
425 return _MakeDir((self
.BuildDir
, "OUTPUT"))
427 ## Return the directory path to store ffs file
429 def FfsOutputDir(self
):
430 if GlobalData
.gFdfParser
:
431 return path
.join(self
.PlatformInfo
.BuildDir
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
)
434 ## Return the directory to store auto-gened source files of the module
437 return _MakeDir((self
.BuildDir
, "DEBUG"))
439 ## Return the path of custom file
441 def CustomMakefile(self
):
443 for Type
in self
.Module
.CustomMakefile
:
444 MakeType
= gMakeTypeMap
[Type
] if Type
in gMakeTypeMap
else 'nmake'
445 File
= os
.path
.join(self
.SourceDir
, self
.Module
.CustomMakefile
[Type
])
446 RetVal
[MakeType
] = File
449 ## Return the directory of the makefile
451 # @retval string The directory string of module's makefile
454 def MakeFileDir(self
):
457 ## Return build command string
459 # @retval string Build command string
462 def BuildCommand(self
):
463 return self
.PlatformInfo
.BuildCommand
465 ## Get Module package and Platform package
467 # @retval list The list of package object
470 def PackageList(self
):
472 if self
.Module
.Packages
:
473 PkagList
.extend(self
.Module
.Packages
)
474 Platform
= self
.BuildDatabase
[self
.PlatformInfo
.MetaFile
, self
.Arch
, self
.BuildTarget
, self
.ToolChain
]
475 for Package
in Platform
.Packages
:
476 if Package
in PkagList
:
478 PkagList
.append(Package
)
481 ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on
483 # @retval list The list of package object
486 def DerivedPackageList(self
):
488 PackageList
.extend(self
.PackageList
)
489 for M
in self
.DependentLibraryList
:
490 for Package
in M
.Packages
:
491 if Package
in PackageList
:
493 PackageList
.append(Package
)
496 ## Get the depex string
498 # @return : a string contain all depex expression.
499 def _GetDepexExpresionString(self
):
502 ## DPX_SOURCE IN Define section.
503 if self
.Module
.DxsFile
:
505 for M
in [self
.Module
] + self
.DependentLibraryList
:
506 Filename
= M
.MetaFile
.Path
507 InfObj
= InfSectionParser
.InfSectionParser(Filename
)
508 DepexExpressionList
= InfObj
.GetDepexExpresionList()
509 for DepexExpression
in DepexExpressionList
:
510 for key
in DepexExpression
:
511 Arch
, ModuleType
= key
512 DepexExpr
= [x
for x
in DepexExpression
[key
] if not str(x
).startswith('#')]
513 # the type of build module is USER_DEFINED.
514 # All different DEPEX section tags would be copied into the As Built INF file
515 # and there would be separate DEPEX section tags
516 if self
.ModuleType
.upper() == SUP_MODULE_USER_DEFINED
or self
.ModuleType
.upper() == SUP_MODULE_HOST_APPLICATION
:
517 if (Arch
.upper() == self
.Arch
.upper()) and (ModuleType
.upper() != TAB_ARCH_COMMON
):
518 DepexList
.append({(Arch
, ModuleType
): DepexExpr
})
520 if Arch
.upper() == TAB_ARCH_COMMON
or \
521 (Arch
.upper() == self
.Arch
.upper() and \
522 ModuleType
.upper() in [TAB_ARCH_COMMON
, self
.ModuleType
.upper()]):
523 DepexList
.append({(Arch
, ModuleType
): DepexExpr
})
525 #the type of build module is USER_DEFINED.
526 if self
.ModuleType
.upper() == SUP_MODULE_USER_DEFINED
or self
.ModuleType
.upper() == SUP_MODULE_HOST_APPLICATION
:
527 for Depex
in DepexList
:
529 DepexStr
+= '[Depex.%s.%s]\n' % key
530 DepexStr
+= '\n'.join('# '+ val
for val
in Depex
[key
])
533 return '[Depex.%s]\n' % self
.Arch
536 #the type of build module not is USER_DEFINED.
538 for Depex
in DepexList
:
543 for D
in Depex
.values():
544 DepexStr
+= ' '.join(val
for val
in D
)
545 Index
= DepexStr
.find('END')
546 if Index
> -1 and Index
== len(DepexStr
) - 3:
547 DepexStr
= DepexStr
[:-3]
548 DepexStr
= DepexStr
.strip()
551 DepexStr
= DepexStr
.lstrip('(').rstrip(')').strip()
553 return '[Depex.%s]\n' % self
.Arch
554 return '[Depex.%s]\n# ' % self
.Arch
+ DepexStr
556 ## Merge dependency expression
558 # @retval list The token list of the dependency expression after parsed
562 if self
.DxsFile
or self
.IsLibrary
or TAB_DEPENDENCY_EXPRESSION_FILE
in self
.FileTypes
:
567 # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
569 FixedVoidTypePcds
= {}
570 for M
in [self
] + self
.LibraryAutoGenList
:
571 FixedVoidTypePcds
.update(M
.FixedVoidTypePcds
)
572 for M
in [self
] + self
.LibraryAutoGenList
:
574 for D
in M
.Module
.Depex
[self
.Arch
, self
.ModuleType
]:
576 DepexList
.append('AND')
577 DepexList
.append('(')
578 #replace D with value if D is FixedAtBuild PCD
585 Value
= FixedVoidTypePcds
[item
]
586 if len(Value
.split(',')) != 16:
587 EdkLogger
.error("build", FORMAT_INVALID
,
588 "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item
))
589 NewList
.append(Value
)
591 EdkLogger
.error("build", FORMAT_INVALID
, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item
))
593 DepexList
.extend(NewList
)
594 if DepexList
[-1] == 'END': # no need of a END at this time
596 DepexList
.append(')')
599 EdkLogger
.verbose("DEPEX[%s] (+%s) = %s" % (self
.Name
, M
.Module
.BaseName
, DepexList
))
600 if 'BEFORE' in DepexList
or 'AFTER' in DepexList
:
602 if len(DepexList
) > 0:
603 EdkLogger
.verbose('')
604 return {self
.ModuleType
:DepexList
}
606 ## Merge dependency expression
608 # @retval list The token list of the dependency expression after parsed
611 def DepexExpressionDict(self
):
612 if self
.DxsFile
or self
.IsLibrary
or TAB_DEPENDENCY_EXPRESSION_FILE
in self
.FileTypes
:
615 DepexExpressionString
= ''
617 # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
619 for M
in [self
.Module
] + self
.DependentLibraryList
:
621 for D
in M
.DepexExpression
[self
.Arch
, self
.ModuleType
]:
622 if DepexExpressionString
!= '':
623 DepexExpressionString
+= ' AND '
624 DepexExpressionString
+= '('
625 DepexExpressionString
+= D
626 DepexExpressionString
= DepexExpressionString
.rstrip('END').strip()
627 DepexExpressionString
+= ')'
630 EdkLogger
.verbose("DEPEX[%s] (+%s) = %s" % (self
.Name
, M
.BaseName
, DepexExpressionString
))
631 if 'BEFORE' in DepexExpressionString
or 'AFTER' in DepexExpressionString
:
633 if len(DepexExpressionString
) > 0:
634 EdkLogger
.verbose('')
636 return {self
.ModuleType
:DepexExpressionString
}
638 # Get the tiano core user extension, it is contain dependent library.
639 # @retval: a list contain tiano core userextension.
641 def _GetTianoCoreUserExtensionList(self
):
642 TianoCoreUserExtentionList
= []
643 for M
in [self
.Module
] + self
.DependentLibraryList
:
644 Filename
= M
.MetaFile
.Path
645 InfObj
= InfSectionParser
.InfSectionParser(Filename
)
646 TianoCoreUserExtenList
= InfObj
.GetUserExtensionTianoCore()
647 for TianoCoreUserExtent
in TianoCoreUserExtenList
:
648 for Section
in TianoCoreUserExtent
:
649 ItemList
= Section
.split(TAB_SPLIT
)
651 if len(ItemList
) == 4:
653 if Arch
.upper() == TAB_ARCH_COMMON
or Arch
.upper() == self
.Arch
.upper():
655 TianoCoreList
.extend([TAB_SECTION_START
+ Section
+ TAB_SECTION_END
])
656 TianoCoreList
.extend(TianoCoreUserExtent
[Section
][:])
657 TianoCoreList
.append('\n')
658 TianoCoreUserExtentionList
.append(TianoCoreList
)
660 return TianoCoreUserExtentionList
662 ## Return the list of specification version required for the module
664 # @retval list The list of specification defined in module file
667 def Specification(self
):
668 return self
.Module
.Specification
670 ## Tool option for the module build
672 # @param PlatformInfo The object of PlatformBuildInfo
673 # @retval dict The dict containing valid options
676 def BuildOption(self
):
677 RetVal
, self
.BuildRuleOrder
= self
.PlatformInfo
.ApplyBuildOption(self
.Module
)
678 if self
.BuildRuleOrder
:
679 self
.BuildRuleOrder
= ['.%s' % Ext
for Ext
in self
.BuildRuleOrder
.split()]
682 ## Get include path list from tool option for the module build
684 # @retval list The include path list
687 def BuildOptionIncPathList(self
):
689 # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
690 # is the former use /I , the Latter used -I to specify include directories
692 if self
.PlatformInfo
.ToolChainFamily
in (TAB_COMPILER_MSFT
):
693 BuildOptIncludeRegEx
= gBuildOptIncludePatternMsft
694 elif self
.PlatformInfo
.ToolChainFamily
in ('INTEL', 'GCC', 'RVCT'):
695 BuildOptIncludeRegEx
= gBuildOptIncludePatternOther
698 # New ToolChainFamily, don't known whether there is option to specify include directories
703 for Tool
in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
705 FlagOption
= self
.BuildOption
[Tool
]['FLAGS']
709 if self
.ToolChainFamily
!= 'RVCT':
710 IncPathList
= [NormPath(Path
, self
.Macros
) for Path
in BuildOptIncludeRegEx
.findall(FlagOption
)]
713 # RVCT may specify a list of directory seperated by commas
716 for Path
in BuildOptIncludeRegEx
.findall(FlagOption
):
717 PathList
= GetSplitList(Path
, TAB_COMMA_SPLIT
)
718 IncPathList
.extend(NormPath(PathEntry
, self
.Macros
) for PathEntry
in PathList
)
721 # EDK II modules must not reference header files outside of the packages they depend on or
722 # within the module's directory tree. Report error if violation.
724 if GlobalData
.gDisableIncludePathCheck
== False:
725 for Path
in IncPathList
:
726 if (Path
not in self
.IncludePathList
) and (CommonPath([Path
, self
.MetaFile
.Dir
]) != self
.MetaFile
.Dir
):
727 ErrMsg
= "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path
, Tool
, FlagOption
)
728 EdkLogger
.error("build",
731 File
=str(self
.MetaFile
))
732 RetVal
+= IncPathList
735 ## Return a list of files which can be built from source
737 # What kind of files can be built is determined by build rules in
738 # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
741 def SourceFileList(self
):
743 ToolChainTagSet
= {"", TAB_STAR
, self
.ToolChain
}
744 ToolChainFamilySet
= {"", TAB_STAR
, self
.ToolChainFamily
, self
.BuildRuleFamily
}
745 for F
in self
.Module
.Sources
:
747 if F
.TagName
not in ToolChainTagSet
:
748 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "The toolchain [%s] for processing file [%s] is found, "
749 "but [%s] is currently used" % (F
.TagName
, str(F
), self
.ToolChain
))
751 # match tool chain family or build rule family
752 if F
.ToolChainFamily
not in ToolChainFamilySet
:
755 "The file [%s] must be built by tools of [%s], " \
756 "but current toolchain family is [%s], buildrule family is [%s]" \
757 % (str(F
), F
.ToolChainFamily
, self
.ToolChainFamily
, self
.BuildRuleFamily
))
760 # add the file path into search path list for file including
761 if F
.Dir
not in self
.IncludePathList
:
762 self
.IncludePathList
.insert(0, F
.Dir
)
765 self
._MatchBuildRuleOrder
(RetVal
)
768 self
._ApplyBuildRule
(F
, TAB_UNKNOWN_FILE
)
771 def _MatchBuildRuleOrder(self
, FileList
):
774 for SingleFile
in FileList
:
775 if self
.BuildRuleOrder
and SingleFile
.Ext
in self
.BuildRuleOrder
and SingleFile
.Ext
in self
.BuildRules
:
776 key
= SingleFile
.Path
.rsplit(SingleFile
.Ext
,1)[0]
777 if key
in Order_Dict
:
778 Order_Dict
[key
].append(SingleFile
.Ext
)
780 Order_Dict
[key
] = [SingleFile
.Ext
]
784 if len(Order_Dict
[F
]) > 1:
785 Order_Dict
[F
].sort(key
=lambda i
: self
.BuildRuleOrder
.index(i
))
786 for Ext
in Order_Dict
[F
][1:]:
787 RemoveList
.append(F
+ Ext
)
789 for item
in RemoveList
:
790 FileList
.remove(item
)
794 ## Return the list of unicode files
796 def UnicodeFileList(self
):
797 return self
.FileTypes
.get(TAB_UNICODE_FILE
,[])
799 ## Return the list of vfr files
801 def VfrFileList(self
):
802 return self
.FileTypes
.get(TAB_VFR_FILE
, [])
804 ## Return the list of Image Definition files
806 def IdfFileList(self
):
807 return self
.FileTypes
.get(TAB_IMAGE_FILE
,[])
809 ## Return a list of files which can be built from binary
811 # "Build" binary files are just to copy them to build directory.
813 # @retval list The list of files which can be built later
816 def BinaryFileList(self
):
818 for F
in self
.Module
.Binaries
:
819 if F
.Target
not in [TAB_ARCH_COMMON
, TAB_STAR
] and F
.Target
!= self
.BuildTarget
:
822 self
._ApplyBuildRule
(F
, F
.Type
, BinaryFileList
=RetVal
)
826 def BuildRules(self
):
828 BuildRuleDatabase
= self
.PlatformInfo
.BuildRule
829 for Type
in BuildRuleDatabase
.FileTypeList
:
830 #first try getting build rule by BuildRuleFamily
831 RuleObject
= BuildRuleDatabase
[Type
, self
.BuildType
, self
.Arch
, self
.BuildRuleFamily
]
833 # build type is always module type, but ...
834 if self
.ModuleType
!= self
.BuildType
:
835 RuleObject
= BuildRuleDatabase
[Type
, self
.ModuleType
, self
.Arch
, self
.BuildRuleFamily
]
836 #second try getting build rule by ToolChainFamily
838 RuleObject
= BuildRuleDatabase
[Type
, self
.BuildType
, self
.Arch
, self
.ToolChainFamily
]
840 # build type is always module type, but ...
841 if self
.ModuleType
!= self
.BuildType
:
842 RuleObject
= BuildRuleDatabase
[Type
, self
.ModuleType
, self
.Arch
, self
.ToolChainFamily
]
845 RuleObject
= RuleObject
.Instantiate(self
.Macros
)
846 RetVal
[Type
] = RuleObject
847 for Ext
in RuleObject
.SourceFileExtList
:
848 RetVal
[Ext
] = RuleObject
851 def _ApplyBuildRule(self
, File
, FileType
, BinaryFileList
=None):
852 if self
._BuildTargets
is None:
853 self
._IntroBuildTargetList
= set()
854 self
._FinalBuildTargetList
= set()
855 self
._BuildTargets
= defaultdict(set)
856 self
._FileTypes
= defaultdict(set)
858 if not BinaryFileList
:
859 BinaryFileList
= self
.BinaryFileList
861 SubDirectory
= os
.path
.join(self
.OutputDir
, File
.SubDir
)
862 if not os
.path
.exists(SubDirectory
):
863 CreateDirectory(SubDirectory
)
869 # Make sure to get build rule order value
873 while Index
< len(SourceList
):
874 Source
= SourceList
[Index
]
878 CreateDirectory(Source
.Dir
)
880 if File
.IsBinary
and File
== Source
and File
in BinaryFileList
:
881 # Skip all files that are not binary libraries
882 if not self
.IsLibrary
:
884 RuleObject
= self
.BuildRules
[TAB_DEFAULT_BINARY_FILE
]
885 elif FileType
in self
.BuildRules
:
886 RuleObject
= self
.BuildRules
[FileType
]
887 elif Source
.Ext
in self
.BuildRules
:
888 RuleObject
= self
.BuildRules
[Source
.Ext
]
890 # stop at no more rules
892 self
._FinalBuildTargetList
.add(LastTarget
)
895 FileType
= RuleObject
.SourceFileType
896 self
._FileTypes
[FileType
].add(Source
)
898 # stop at STATIC_LIBRARY for library
899 if self
.IsLibrary
and FileType
== TAB_STATIC_LIBRARY
:
901 self
._FinalBuildTargetList
.add(LastTarget
)
904 Target
= RuleObject
.Apply(Source
, self
.BuildRuleOrder
)
907 self
._FinalBuildTargetList
.add(LastTarget
)
909 elif not Target
.Outputs
:
910 # Only do build for target with outputs
911 self
._FinalBuildTargetList
.add(Target
)
913 self
._BuildTargets
[FileType
].add(Target
)
915 if not Source
.IsBinary
and Source
== File
:
916 self
._IntroBuildTargetList
.add(Target
)
918 # to avoid cyclic rule
919 if FileType
in RuleChain
:
922 RuleChain
.add(FileType
)
923 SourceList
.extend(Target
.Outputs
)
925 FileType
= TAB_UNKNOWN_FILE
929 if self
._BuildTargets
is None:
930 self
._IntroBuildTargetList
= set()
931 self
._FinalBuildTargetList
= set()
932 self
._BuildTargets
= defaultdict(set)
933 self
._FileTypes
= defaultdict(set)
935 #TRICK: call SourceFileList property to apply build rule for source files
938 #TRICK: call _GetBinaryFileList to apply build rule for binary files
941 return self
._BuildTargets
944 def IntroTargetList(self
):
946 return self
._IntroBuildTargetList
949 def CodaTargetList(self
):
951 return self
._FinalBuildTargetList
956 return self
._FileTypes
958 ## Get the list of package object the module depends on and the Platform depends on
960 # @retval list The package object list
963 def DependentPackageList(self
):
964 return self
.PackageList
966 ## Return the list of auto-generated code file
968 # @retval list The list of auto-generated file
971 def AutoGenFileList(self
):
972 AutoGenUniIdf
= self
.BuildType
!= 'UEFI_HII'
973 UniStringBinBuffer
= BytesIO()
974 IdfGenBinBuffer
= BytesIO()
976 AutoGenC
= TemplateString()
977 AutoGenH
= TemplateString()
978 StringH
= TemplateString()
979 StringIdf
= TemplateString()
980 GenC
.CreateCode(self
, AutoGenC
, AutoGenH
, StringH
, AutoGenUniIdf
, UniStringBinBuffer
, StringIdf
, AutoGenUniIdf
, IdfGenBinBuffer
)
982 # AutoGen.c is generated if there are library classes in inf, or there are object files
984 if str(AutoGenC
) != "" and (len(self
.Module
.LibraryClasses
) > 0
985 or TAB_OBJECT_FILE
in self
.FileTypes
):
986 AutoFile
= PathClass(gAutoGenCodeFileName
, self
.DebugDir
)
987 RetVal
[AutoFile
] = str(AutoGenC
)
988 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
989 if str(AutoGenH
) != "":
990 AutoFile
= PathClass(gAutoGenHeaderFileName
, self
.DebugDir
)
991 RetVal
[AutoFile
] = str(AutoGenH
)
992 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
993 if str(StringH
) != "":
994 AutoFile
= PathClass(gAutoGenStringFileName
% {"module_name":self
.Name
}, self
.DebugDir
)
995 RetVal
[AutoFile
] = str(StringH
)
996 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
997 if UniStringBinBuffer
is not None and UniStringBinBuffer
.getvalue() != b
"":
998 AutoFile
= PathClass(gAutoGenStringFormFileName
% {"module_name":self
.Name
}, self
.OutputDir
)
999 RetVal
[AutoFile
] = UniStringBinBuffer
.getvalue()
1000 AutoFile
.IsBinary
= True
1001 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
1002 if UniStringBinBuffer
is not None:
1003 UniStringBinBuffer
.close()
1004 if str(StringIdf
) != "":
1005 AutoFile
= PathClass(gAutoGenImageDefFileName
% {"module_name":self
.Name
}, self
.DebugDir
)
1006 RetVal
[AutoFile
] = str(StringIdf
)
1007 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
1008 if IdfGenBinBuffer
is not None and IdfGenBinBuffer
.getvalue() != b
"":
1009 AutoFile
= PathClass(gAutoGenIdfFileName
% {"module_name":self
.Name
}, self
.OutputDir
)
1010 RetVal
[AutoFile
] = IdfGenBinBuffer
.getvalue()
1011 AutoFile
.IsBinary
= True
1012 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
1013 if IdfGenBinBuffer
is not None:
1014 IdfGenBinBuffer
.close()
1017 ## Return the list of library modules explicitly or implicitly used by this module
1019 def DependentLibraryList(self
):
1020 # only merge library classes and PCD for non-library module
1023 return self
.PlatformInfo
.ApplyLibraryInstance(self
.Module
)
1025 ## Get the list of PCDs from current module
1027 # @retval list The list of PCD
1030 def ModulePcdList(self
):
1031 # apply PCD settings from platform
1032 RetVal
= self
.PlatformInfo
.ApplyPcdSetting(self
.Module
, self
.Module
.Pcds
)
1036 def _PcdComments(self
):
1037 ReVal
= OrderedListDict()
1038 ExtendCopyDictionaryLists(ReVal
, self
.Module
.PcdComments
)
1039 if not self
.IsLibrary
:
1040 for Library
in self
.DependentLibraryList
:
1041 ExtendCopyDictionaryLists(ReVal
, Library
.PcdComments
)
## Get the list of PCDs from dependent libraries
#
#   @retval    list                    The list of PCD
def LibraryPcdList(self):
    """Return PCDs contributed by dependent libraries (with platform
    settings applied), excluding any PCD the module itself declares.

    NOTE(review): the ``RetVal``/``Pcds`` initialization, the
    duplicate-skip ``continue``/bookkeeping, and the trailing return were
    dropped by the extraction and reconstructed - confirm before merging.
    """
    RetVal = []
    Pcds = set()
    # get PCDs from dependent libraries
    for Library in self.DependentLibraryList:
        PcdsInLibrary = OrderedDict()
        for Key in Library.Pcds:
            # skip duplicated PCDs
            if Key in self.Module.Pcds or Key in Pcds:
                continue
            Pcds.add(Key)
            # shallow copy so platform settings don't mutate the
            # library's own PCD object
            PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
        RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))
    return RetVal
## Get the GUID value mapping
#
#   @retval    dict                    The mapping between GUID cname and its value
def GuidList(self):
    """Return the GUID cname-to-value mapping for this module, merged
    with the GUIDs of all dependent libraries; GUID usage comments are
    accumulated into ``self._GuidComments`` as a side effect.

    NOTE(review): the ``def`` line and trailing return were dropped by
    the extraction; the name is reconstructed from later uses of
    ``self.GuidList`` - confirm before merging.
    """
    RetVal = self.Module.Guids
    for Library in self.DependentLibraryList:
        RetVal.update(Library.Guids)
        ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)
    ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)
    return RetVal
def GetGuidsUsedByPcd(self):
    """Return GUIDs referenced by PCDs of this module and all dependent
    libraries, module entries first."""
    RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())
    for Library in self.DependentLibraryList:
        RetVal.update(Library.GetGuidsUsedByPcd())
    # NOTE(review): the return was dropped by the extraction (original
    # line 1084); restored.
    return RetVal
## Get the protocol value mapping
#
#   @retval    dict                    The mapping between protocol cname and its value
def ProtocolList(self):
    """Return the protocol cname-to-value mapping for this module merged
    with all dependent libraries; protocol usage comments are accumulated
    into ``self._ProtocolComments`` as a side effect.

    NOTE(review): the trailing ``return RetVal`` was dropped by the
    extraction; restored.
    """
    RetVal = OrderedDict(self.Module.Protocols)
    for Library in self.DependentLibraryList:
        RetVal.update(Library.Protocols)
        ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)
    ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)
    return RetVal
## Get the PPI value mapping
#
#   @retval    dict                    The mapping between PPI cname and its value
def PpiList(self):
    """Return the PPI cname-to-value mapping for this module merged with
    all dependent libraries; PPI usage comments are accumulated into
    ``self._PpiComments`` as a side effect.

    NOTE(review): the ``def`` line and trailing return were dropped by
    the extraction; reconstructed by analogy with ProtocolList and from
    later uses of ``self.PpiList`` - confirm before merging.
    """
    RetVal = OrderedDict(self.Module.Ppis)
    for Library in self.DependentLibraryList:
        RetVal.update(Library.Ppis)
        ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)
    ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)
    return RetVal
## Get the list of include search path
#
#   @retval    list                    The list path
def IncludePathList(self):
    """Return include search paths: the module's own source and debug
    directories, then every (non-private) include directory of each
    dependent package, then paths pulled from build-option flags.

    NOTE(review): the ``RetVal`` initialization and trailing return were
    dropped by the extraction; restored.
    """
    RetVal = []
    RetVal.append(self.MetaFile.Dir)
    RetVal.append(self.DebugDir)
    for Package in self.PackageList:
        PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
        if PackageDir not in RetVal:
            RetVal.append(PackageDir)
        IncludesList = Package.Includes
        if Package._PrivateIncludes:
            # Private includes are only visible to modules that live
            # inside the package's own directory tree.
            if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
                IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
        for Inc in IncludesList:
            if Inc not in RetVal:
                RetVal.append(str(Inc))
    RetVal.extend(self.IncPathFromBuildOptions)
    return RetVal
def IncPathFromBuildOptions(self):
    """Extract existing include directories from /I and -I compiler FLAGS.

    Handles both "-Ipath" (path attached to the switch) and "-I path"
    (path in the next whitespace-separated token). Only directories that
    actually exist are returned.

    NOTE(review): the list initialization, the whitespace-separated
    argument bookkeeping, and the return were dropped by the extraction
    and reconstructed - confirm against the original before merging.
    """
    IncPathList = []
    for tool in self.BuildOption:
        if 'FLAGS' in self.BuildOption[tool]:
            flags = self.BuildOption[tool]['FLAGS']
            # True when a bare "/I" or "-I" was seen and the path is
            # expected in the following token.
            whitespace = False
            for flag in flags.split(" "):
                flag = flag.strip()
                if flag.startswith(("/I", "-I")):
                    if len(flag) > 2:
                        if os.path.exists(flag[2:]):
                            IncPathList.append(flag[2:])
                        continue
                    whitespace = True
                    continue
                if whitespace and flag:
                    if os.path.exists(flag):
                        IncPathList.append(flag)
                    whitespace = False
    return IncPathList
def IncludePathLength(self):
    """Return the combined length of all include paths, counting one
    extra character per path for its separator."""
    total = 0
    for inc in self.IncludePathList:
        total += len(inc) + 1
    return total
## Get the list of include paths from the packages
#
#   @IncludesList     list             The list path
def PackageIncludePathList(self):
    """Return include directories of the module's dependent packages,
    excluding a package's private includes when this module lives
    outside that package's directory tree.

    NOTE(review): the initialization and trailing return were dropped by
    the extraction and reconstructed. As written, ``IncludesList`` is
    reassigned each iteration so only the last package's includes survive
    the loop - confirm this matches the original before merging.
    """
    IncludesList = []
    for Package in self.PackageList:
        PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
        IncludesList = Package.Includes
        if Package._PrivateIncludes:
            if not self.MetaFile.Path.startswith(PackageDir):
                IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
    return IncludesList
1176 ## Get HII EX PCDs which maybe used by VFR
1178 # efivarstore used by VFR may relate with HII EX PCDs
1179 # Get the variable name and GUID from efivarstore and HII EX PCD
1180 # List the HII EX PCDs in As Built INF if both name and GUID match.
1182 # @retval list HII EX PCDs
1184 def _GetPcdsMaybeUsedByVfr(self
):
1185 if not self
.SourceFileList
:
1189 for SrcFile
in self
.SourceFileList
:
1190 if SrcFile
.Ext
.lower() != '.vfr':
1192 Vfri
= os
.path
.join(self
.OutputDir
, SrcFile
.BaseName
+ '.i')
1193 if not os
.path
.exists(Vfri
):
1195 VfriFile
= open(Vfri
, 'r')
1196 Content
= VfriFile
.read()
1198 Pos
= Content
.find('efivarstore')
1201 # Make sure 'efivarstore' is the start of efivarstore statement
1202 # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
1205 while Index
>= 0 and Content
[Index
] in ' \t\r\n':
1207 if Index
>= 0 and Content
[Index
] != ';':
1208 Pos
= Content
.find('efivarstore', Pos
+ len('efivarstore'))
1211 # 'efivarstore' must be followed by name and guid
1213 Name
= gEfiVarStoreNamePattern
.search(Content
, Pos
)
1216 Guid
= gEfiVarStoreGuidPattern
.search(Content
, Pos
)
1219 NameArray
= _ConvertStringToByteArray('L"' + Name
.group(1) + '"')
1220 NameGuids
.add((NameArray
, GuidStructureStringToGuidString(Guid
.group(1))))
1221 Pos
= Content
.find('efivarstore', Name
.end())
1225 for Pcd
in self
.PlatformInfo
.Pcds
.values():
1226 if Pcd
.Type
!= TAB_PCDS_DYNAMIC_EX_HII
:
1228 for SkuInfo
in Pcd
.SkuInfoList
.values():
1229 Value
= GuidValue(SkuInfo
.VariableGuid
, self
.PlatformInfo
.PackageList
, self
.MetaFile
.Path
)
1232 Name
= _ConvertStringToByteArray(SkuInfo
.VariableName
)
1233 Guid
= GuidStructureStringToGuidString(Value
)
1234 if (Name
, Guid
) in NameGuids
and Pcd
not in HiiExPcds
:
1235 HiiExPcds
.append(Pcd
)
1240 def _GenOffsetBin(self
):
1242 for SourceFile
in self
.Module
.Sources
:
1243 if SourceFile
.Type
.upper() == ".VFR" :
1245 # search the .map file to find the offset of vfr binary in the PE32+/TE file.
1247 VfrUniBaseName
[SourceFile
.BaseName
] = (SourceFile
.BaseName
+ "Bin")
1248 elif SourceFile
.Type
.upper() == ".UNI" :
1250 # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
1252 VfrUniBaseName
["UniOffsetName"] = (self
.Name
+ "Strings")
1254 if not VfrUniBaseName
:
1256 MapFileName
= os
.path
.join(self
.OutputDir
, self
.Name
+ ".map")
1257 EfiFileName
= os
.path
.join(self
.OutputDir
, self
.Name
+ ".efi")
1258 VfrUniOffsetList
= GetVariableOffset(MapFileName
, EfiFileName
, list(VfrUniBaseName
.values()))
1259 if not VfrUniOffsetList
:
1262 OutputName
= '%sOffset.bin' % self
.Name
1263 UniVfrOffsetFileName
= os
.path
.join( self
.OutputDir
, OutputName
)
1266 fInputfile
= open(UniVfrOffsetFileName
, "wb+", 0)
1268 EdkLogger
.error("build", FILE_OPEN_FAILURE
, "File open failed for %s" % UniVfrOffsetFileName
, None)
1270 # Use a instance of BytesIO to cache data
1271 fStringIO
= BytesIO()
1273 for Item
in VfrUniOffsetList
:
1274 if (Item
[0].find("Strings") != -1):
1276 # UNI offset in image.
1278 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
1280 UniGuid
= b
'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
1281 fStringIO
.write(UniGuid
)
1282 UniValue
= pack ('Q', int (Item
[1], 16))
1283 fStringIO
.write (UniValue
)
1286 # VFR binary offset in image.
1288 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
1290 VfrGuid
= b
'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
1291 fStringIO
.write(VfrGuid
)
1292 VfrValue
= pack ('Q', int (Item
[1], 16))
1293 fStringIO
.write (VfrValue
)
1295 # write data into file.
1298 fInputfile
.write (fStringIO
.getvalue())
1300 EdkLogger
.error("build", FILE_WRITE_FAILURE
, "Write data to file %s failed, please check whether the "
1301 "file been locked or using by other applications." %UniVfrOffsetFileName
, None)
1308 def OutputFile(self
):
1311 for Root
, Dirs
, Files
in os
.walk(self
.BuildDir
):
1313 # lib file is already added through above CodaTargetList, skip it here
1314 if not (File
.lower().endswith('.obj') or File
.lower().endswith('.debug')):
1315 NewFile
= path
.join(Root
, File
)
1318 for Root
, Dirs
, Files
in os
.walk(self
.FfsOutputDir
):
1320 NewFile
= path
.join(Root
, File
)
1325 ## Create AsBuilt INF file the module
1327 def CreateAsBuiltInf(self
):
1329 if self
.IsAsBuiltInfCreated
:
1332 # Skip INF file generation for libraries
1336 # Skip the following code for modules with no source files
1337 if not self
.SourceFileList
:
1340 # Skip the following code for modules without any binary files
1341 if self
.BinaryFileList
:
1344 ### TODO: How to handles mixed source and binary modules
1346 # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
1347 # Also find all packages that the DynamicEx PCDs depend on
1352 PcdTokenSpaceList
= []
1353 for Pcd
in self
.ModulePcdList
+ self
.LibraryPcdList
:
1354 if Pcd
.Type
== TAB_PCDS_PATCHABLE_IN_MODULE
:
1355 PatchablePcds
.append(Pcd
)
1356 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_PATCHABLE_IN_MODULE
))
1357 elif Pcd
.Type
in PCD_DYNAMIC_EX_TYPE_SET
:
1360 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC_EX
))
1361 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC
))
1362 PcdTokenSpaceList
.append(Pcd
.TokenSpaceGuidCName
)
1363 GuidList
= OrderedDict(self
.GuidList
)
1364 for TokenSpace
in self
.GetGuidsUsedByPcd
:
1365 # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
1366 # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
1367 if TokenSpace
not in PcdTokenSpaceList
and TokenSpace
in GuidList
:
1368 GuidList
.pop(TokenSpace
)
1369 CheckList
= (GuidList
, self
.PpiList
, self
.ProtocolList
, PcdCheckList
)
1370 for Package
in self
.DerivedPackageList
:
1371 if Package
in Packages
:
1373 BeChecked
= (Package
.Guids
, Package
.Ppis
, Package
.Protocols
, Package
.Pcds
)
1375 for Index
in range(len(BeChecked
)):
1376 for Item
in CheckList
[Index
]:
1377 if Item
in BeChecked
[Index
]:
1378 Packages
.append(Package
)
1384 VfrPcds
= self
._GetPcdsMaybeUsedByVfr
()
1385 for Pkg
in self
.PlatformInfo
.PackageList
:
1388 for VfrPcd
in VfrPcds
:
1389 if ((VfrPcd
.TokenCName
, VfrPcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC_EX
) in Pkg
.Pcds
or
1390 (VfrPcd
.TokenCName
, VfrPcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC
) in Pkg
.Pcds
):
1391 Packages
.append(Pkg
)
1394 ModuleType
= SUP_MODULE_DXE_DRIVER
if self
.ModuleType
== SUP_MODULE_UEFI_DRIVER
and self
.DepexGenerated
else self
.ModuleType
1395 DriverType
= self
.PcdIsDriver
if self
.PcdIsDriver
else ''
1397 MDefs
= self
.Module
.Defines
1400 'module_name' : self
.Name
,
1401 'module_guid' : Guid
,
1402 'module_module_type' : ModuleType
,
1403 'module_version_string' : [MDefs
['VERSION_STRING']] if 'VERSION_STRING' in MDefs
else [],
1404 'pcd_is_driver_string' : [],
1405 'module_uefi_specification_version' : [],
1406 'module_pi_specification_version' : [],
1407 'module_entry_point' : self
.Module
.ModuleEntryPointList
,
1408 'module_unload_image' : self
.Module
.ModuleUnloadImageList
,
1409 'module_constructor' : self
.Module
.ConstructorList
,
1410 'module_destructor' : self
.Module
.DestructorList
,
1411 'module_shadow' : [MDefs
['SHADOW']] if 'SHADOW' in MDefs
else [],
1412 'module_pci_vendor_id' : [MDefs
['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs
else [],
1413 'module_pci_device_id' : [MDefs
['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs
else [],
1414 'module_pci_class_code' : [MDefs
['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs
else [],
1415 'module_pci_revision' : [MDefs
['PCI_REVISION']] if 'PCI_REVISION' in MDefs
else [],
1416 'module_build_number' : [MDefs
['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs
else [],
1417 'module_spec' : [MDefs
['SPEC']] if 'SPEC' in MDefs
else [],
1418 'module_uefi_hii_resource_section' : [MDefs
['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs
else [],
1419 'module_uni_file' : [MDefs
['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs
else [],
1420 'module_arch' : self
.Arch
,
1421 'package_item' : [Package
.MetaFile
.File
.replace('\\', '/') for Package
in Packages
],
1423 'patchablepcd_item' : [],
1425 'protocol_item' : [],
1429 'libraryclasses_item' : []
1432 if 'MODULE_UNI_FILE' in MDefs
:
1433 UNIFile
= os
.path
.join(self
.MetaFile
.Dir
, MDefs
['MODULE_UNI_FILE'])
1434 if os
.path
.isfile(UNIFile
):
1435 shutil
.copy2(UNIFile
, self
.OutputDir
)
1437 if self
.AutoGenVersion
> int(gInfSpecVersion
, 0):
1438 AsBuiltInfDict
['module_inf_version'] = '0x%08x' % self
.AutoGenVersion
1440 AsBuiltInfDict
['module_inf_version'] = gInfSpecVersion
1443 AsBuiltInfDict
['pcd_is_driver_string'].append(DriverType
)
1445 if 'UEFI_SPECIFICATION_VERSION' in self
.Specification
:
1446 AsBuiltInfDict
['module_uefi_specification_version'].append(self
.Specification
['UEFI_SPECIFICATION_VERSION'])
1447 if 'PI_SPECIFICATION_VERSION' in self
.Specification
:
1448 AsBuiltInfDict
['module_pi_specification_version'].append(self
.Specification
['PI_SPECIFICATION_VERSION'])
1450 OutputDir
= self
.OutputDir
.replace('\\', '/').strip('/')
1451 DebugDir
= self
.DebugDir
.replace('\\', '/').strip('/')
1452 for Item
in self
.CodaTargetList
:
1453 File
= Item
.Target
.Path
.replace('\\', '/').strip('/').replace(DebugDir
, '').replace(OutputDir
, '').strip('/')
1454 if os
.path
.isabs(File
):
1455 File
= File
.replace('\\', '/').strip('/').replace(OutputDir
, '').strip('/')
1456 if Item
.Target
.Ext
.lower() == '.aml':
1457 AsBuiltInfDict
['binary_item'].append('ASL|' + File
)
1458 elif Item
.Target
.Ext
.lower() == '.acpi':
1459 AsBuiltInfDict
['binary_item'].append('ACPI|' + File
)
1460 elif Item
.Target
.Ext
.lower() == '.efi':
1461 AsBuiltInfDict
['binary_item'].append('PE32|' + self
.Name
+ '.efi')
1463 AsBuiltInfDict
['binary_item'].append('BIN|' + File
)
1464 if not self
.DepexGenerated
:
1465 DepexFile
= os
.path
.join(self
.OutputDir
, self
.Name
+ '.depex')
1466 if os
.path
.exists(DepexFile
):
1467 self
.DepexGenerated
= True
1468 if self
.DepexGenerated
:
1469 if self
.ModuleType
in [SUP_MODULE_PEIM
]:
1470 AsBuiltInfDict
['binary_item'].append('PEI_DEPEX|' + self
.Name
+ '.depex')
1471 elif self
.ModuleType
in [SUP_MODULE_DXE_DRIVER
, SUP_MODULE_DXE_RUNTIME_DRIVER
, SUP_MODULE_DXE_SAL_DRIVER
, SUP_MODULE_UEFI_DRIVER
]:
1472 AsBuiltInfDict
['binary_item'].append('DXE_DEPEX|' + self
.Name
+ '.depex')
1473 elif self
.ModuleType
in [SUP_MODULE_DXE_SMM_DRIVER
]:
1474 AsBuiltInfDict
['binary_item'].append('SMM_DEPEX|' + self
.Name
+ '.depex')
1476 Bin
= self
._GenOffsetBin
()
1478 AsBuiltInfDict
['binary_item'].append('BIN|%s' % Bin
)
1480 for Root
, Dirs
, Files
in os
.walk(OutputDir
):
1482 if File
.lower().endswith('.pdb'):
1483 AsBuiltInfDict
['binary_item'].append('DISPOSABLE|' + File
)
1484 HeaderComments
= self
.Module
.HeaderComments
1486 for Index
in range(len(HeaderComments
)):
1487 if HeaderComments
[Index
].find('@BinaryHeader') != -1:
1488 HeaderComments
[Index
] = HeaderComments
[Index
].replace('@BinaryHeader', '@file')
1491 AsBuiltInfDict
['header_comments'] = '\n'.join(HeaderComments
[StartPos
:]).replace(':#', '://')
1492 AsBuiltInfDict
['tail_comments'] = '\n'.join(self
.Module
.TailComments
)
1495 (self
.ProtocolList
, self
._ProtocolComments
, 'protocol_item'),
1496 (self
.PpiList
, self
._PpiComments
, 'ppi_item'),
1497 (GuidList
, self
._GuidComments
, 'guid_item')
1499 for Item
in GenList
:
1500 for CName
in Item
[0]:
1501 Comments
= '\n '.join(Item
[1][CName
]) if CName
in Item
[1] else ''
1502 Entry
= Comments
+ '\n ' + CName
if Comments
else CName
1503 AsBuiltInfDict
[Item
[2]].append(Entry
)
1504 PatchList
= parsePcdInfoFromMapFile(
1505 os
.path
.join(self
.OutputDir
, self
.Name
+ '.map'),
1506 os
.path
.join(self
.OutputDir
, self
.Name
+ '.efi')
1509 for Pcd
in PatchablePcds
:
1510 TokenCName
= Pcd
.TokenCName
1511 for PcdItem
in GlobalData
.MixedPcd
:
1512 if (Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
) in GlobalData
.MixedPcd
[PcdItem
]:
1513 TokenCName
= PcdItem
[0]
1515 for PatchPcd
in PatchList
:
1516 if TokenCName
== PatchPcd
[0]:
1521 if Pcd
.DatumType
== 'BOOLEAN':
1522 BoolValue
= Pcd
.DefaultValue
.upper()
1523 if BoolValue
== 'TRUE':
1524 Pcd
.DefaultValue
= '1'
1525 elif BoolValue
== 'FALSE':
1526 Pcd
.DefaultValue
= '0'
1528 if Pcd
.DatumType
in TAB_PCD_NUMERIC_TYPES
:
1529 HexFormat
= '0x%02x'
1530 if Pcd
.DatumType
== TAB_UINT16
:
1531 HexFormat
= '0x%04x'
1532 elif Pcd
.DatumType
== TAB_UINT32
:
1533 HexFormat
= '0x%08x'
1534 elif Pcd
.DatumType
== TAB_UINT64
:
1535 HexFormat
= '0x%016x'
1536 PcdValue
= HexFormat
% int(Pcd
.DefaultValue
, 0)
1538 if Pcd
.MaxDatumSize
is None or Pcd
.MaxDatumSize
== '':
1539 EdkLogger
.error("build", AUTOGEN_ERROR
,
1540 "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1542 ArraySize
= int(Pcd
.MaxDatumSize
, 0)
1543 PcdValue
= Pcd
.DefaultValue
1544 if PcdValue
[0] != '{':
1546 if PcdValue
[0] == 'L':
1548 PcdValue
= PcdValue
.lstrip('L')
1549 PcdValue
= eval(PcdValue
)
1551 for Index
in range(0, len(PcdValue
)):
1553 CharVal
= ord(PcdValue
[Index
])
1554 NewValue
= NewValue
+ '0x%02x' % (CharVal
& 0x00FF) + ', ' \
1555 + '0x%02x' % (CharVal
>> 8) + ', '
1557 NewValue
= NewValue
+ '0x%02x' % (ord(PcdValue
[Index
]) % 0x100) + ', '
1560 Padding
= Padding
* 2
1561 ArraySize
= ArraySize
// 2
1562 if ArraySize
< (len(PcdValue
) + 1):
1563 if Pcd
.MaxSizeUserSet
:
1564 EdkLogger
.error("build", AUTOGEN_ERROR
,
1565 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1568 ArraySize
= len(PcdValue
) + 1
1569 if ArraySize
> len(PcdValue
) + 1:
1570 NewValue
= NewValue
+ Padding
* (ArraySize
- len(PcdValue
) - 1)
1571 PcdValue
= NewValue
+ Padding
.strip().rstrip(',') + '}'
1572 elif len(PcdValue
.split(',')) <= ArraySize
:
1573 PcdValue
= PcdValue
.rstrip('}') + ', 0x00' * (ArraySize
- len(PcdValue
.split(',')))
1576 if Pcd
.MaxSizeUserSet
:
1577 EdkLogger
.error("build", AUTOGEN_ERROR
,
1578 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1581 ArraySize
= len(PcdValue
) + 1
1582 PcdItem
= '%s.%s|%s|0x%X' % \
1583 (Pcd
.TokenSpaceGuidCName
, TokenCName
, PcdValue
, PatchPcd
[1])
1585 if (Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
) in self
._PcdComments
:
1586 PcdComments
= '\n '.join(self
._PcdComments
[Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
])
1588 PcdItem
= PcdComments
+ '\n ' + PcdItem
1589 AsBuiltInfDict
['patchablepcd_item'].append(PcdItem
)
1591 for Pcd
in Pcds
+ VfrPcds
:
1594 TokenCName
= Pcd
.TokenCName
1595 for PcdItem
in GlobalData
.MixedPcd
:
1596 if (Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
) in GlobalData
.MixedPcd
[PcdItem
]:
1597 TokenCName
= PcdItem
[0]
1599 if Pcd
.Type
== TAB_PCDS_DYNAMIC_EX_HII
:
1600 for SkuName
in Pcd
.SkuInfoList
:
1601 SkuInfo
= Pcd
.SkuInfoList
[SkuName
]
1602 HiiInfo
= '## %s|%s|%s' % (SkuInfo
.VariableName
, SkuInfo
.VariableGuid
, SkuInfo
.VariableOffset
)
1604 if (Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
) in self
._PcdComments
:
1605 PcdCommentList
= self
._PcdComments
[Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
][:]
1609 for Index
, Comment
in enumerate(PcdCommentList
):
1610 for Usage
in UsageList
:
1611 if Comment
.find(Usage
) != -1:
1615 if UsageIndex
!= -1:
1616 PcdCommentList
[UsageIndex
] = '## %s %s %s' % (UsageStr
, HiiInfo
, PcdCommentList
[UsageIndex
].replace(UsageStr
, ''))
1618 PcdCommentList
.append('## UNDEFINED ' + HiiInfo
)
1619 PcdComments
= '\n '.join(PcdCommentList
)
1620 PcdEntry
= Pcd
.TokenSpaceGuidCName
+ '.' + TokenCName
1622 PcdEntry
= PcdComments
+ '\n ' + PcdEntry
1623 AsBuiltInfDict
['pcd_item'].append(PcdEntry
)
1624 for Item
in self
.BuildOption
:
1625 if 'FLAGS' in self
.BuildOption
[Item
]:
1626 AsBuiltInfDict
['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self
.ToolChainFamily
, self
.BuildTarget
, self
.ToolChain
, self
.Arch
, Item
, self
.BuildOption
[Item
]['FLAGS'].strip()))
1628 # Generated LibraryClasses section in comments.
1629 for Library
in self
.LibraryAutoGenList
:
1630 AsBuiltInfDict
['libraryclasses_item'].append(Library
.MetaFile
.File
.replace('\\', '/'))
1632 # Generated UserExtensions TianoCore section.
1633 # All tianocore user extensions are copied.
1635 for TianoCore
in self
._GetTianoCoreUserExtensionList
():
1636 UserExtStr
+= '\n'.join(TianoCore
)
1637 ExtensionFile
= os
.path
.join(self
.MetaFile
.Dir
, TianoCore
[1])
1638 if os
.path
.isfile(ExtensionFile
):
1639 shutil
.copy2(ExtensionFile
, self
.OutputDir
)
1640 AsBuiltInfDict
['userextension_tianocore_item'] = UserExtStr
1642 # Generated depex expression section in comments.
1643 DepexExpression
= self
._GetDepexExpresionString
()
1644 AsBuiltInfDict
['depexsection_item'] = DepexExpression
if DepexExpression
else ''
1646 AsBuiltInf
= TemplateString()
1647 AsBuiltInf
.Append(gAsBuiltInfHeaderString
.Replace(AsBuiltInfDict
))
1649 SaveFileOnChange(os
.path
.join(self
.OutputDir
, self
.Name
+ '.inf'), str(AsBuiltInf
), False)
1651 self
.IsAsBuiltInfCreated
= True
def CacheCopyFile(self, OriginDir, CopyDir, File):
    """Copy File (which lives under CopyDir) to the same relative
    location under OriginDir, creating directories as needed.

    Copy failures are logged as cache warnings, never raised: the binary
    cache is strictly best-effort and must not fail the build.
    """
    sub_dir = os.path.relpath(File, CopyDir)
    destination_file = os.path.join(OriginDir, sub_dir)
    destination_dir = os.path.dirname(destination_file)
    CreateDirectory(destination_dir)
    # NOTE(review): the try/except lines were dropped by the extraction
    # (original lines 1658/1660); restored. Narrowed from a bare except
    # to Exception so KeyboardInterrupt/SystemExit still propagate.
    try:
        CopyFileOnChange(File, destination_dir)
    except Exception:
        EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))
1664 def CopyModuleToCache(self
):
1665 self
.GenPreMakefileHash(GlobalData
.gCacheIR
)
1666 if not (self
.MetaFile
.Path
, self
.Arch
) in GlobalData
.gCacheIR
or \
1667 not GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
:
1668 EdkLogger
.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1671 self
.GenMakeHash(GlobalData
.gCacheIR
)
1672 if not (self
.MetaFile
.Path
, self
.Arch
) in GlobalData
.gCacheIR
or \
1673 not GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
or \
1674 not GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
:
1675 EdkLogger
.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1678 MakeHashStr
= str(GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
)
1679 FileDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
, MakeHashStr
)
1680 FfsDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
, MakeHashStr
)
1682 CreateDirectory (FileDir
)
1683 self
.SaveHashChainFileToCache(GlobalData
.gCacheIR
)
1684 ModuleFile
= path
.join(self
.OutputDir
, self
.Name
+ '.inf')
1685 if os
.path
.exists(ModuleFile
):
1686 CopyFileOnChange(ModuleFile
, FileDir
)
1687 if not self
.OutputFile
:
1688 Ma
= self
.BuildDatabase
[self
.MetaFile
, self
.Arch
, self
.BuildTarget
, self
.ToolChain
]
1689 self
.OutputFile
= Ma
.Binaries
1690 for File
in self
.OutputFile
:
1691 if os
.path
.exists(File
):
1692 if File
.startswith(os
.path
.abspath(self
.FfsOutputDir
)+os
.sep
):
1693 self
.CacheCopyFile(FfsDir
, self
.FfsOutputDir
, File
)
1695 self
.CacheCopyFile(FileDir
, self
.OutputDir
, File
)
1697 def SaveHashChainFileToCache(self
, gDict
):
1698 if not GlobalData
.gBinCacheDest
:
1701 self
.GenPreMakefileHash(gDict
)
1702 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
1703 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
:
1704 EdkLogger
.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1707 self
.GenMakeHash(gDict
)
1708 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
1709 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
or \
1710 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
:
1711 EdkLogger
.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1714 # save the hash chain list as cache file
1715 MakeHashStr
= str(GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
)
1716 CacheDestDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
)
1717 CacheHashDestDir
= path
.join(CacheDestDir
, MakeHashStr
)
1718 ModuleHashPair
= path
.join(CacheDestDir
, self
.Name
+ ".ModuleHashPair")
1719 MakeHashChain
= path
.join(CacheHashDestDir
, self
.Name
+ ".MakeHashChain")
1720 ModuleFilesChain
= path
.join(CacheHashDestDir
, self
.Name
+ ".ModuleFilesChain")
1722 # save the HashChainDict as json file
1723 CreateDirectory (CacheDestDir
)
1724 CreateDirectory (CacheHashDestDir
)
1726 ModuleHashPairList
= [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
1727 if os
.path
.exists(ModuleHashPair
):
1728 with
open(ModuleHashPair
, 'r') as f
:
1729 ModuleHashPairList
= json
.load(f
)
1730 PreMakeHash
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
1731 MakeHash
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
1732 ModuleHashPairList
.append((PreMakeHash
, MakeHash
))
1733 ModuleHashPairList
= list(set(map(tuple, ModuleHashPairList
)))
1734 with
open(ModuleHashPair
, 'w') as f
:
1735 json
.dump(ModuleHashPairList
, f
, indent
=2)
1737 EdkLogger
.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair
)
1741 with
open(MakeHashChain
, 'w') as f
:
1742 json
.dump(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
, f
, indent
=2)
1744 EdkLogger
.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain
)
1748 with
open(ModuleFilesChain
, 'w') as f
:
1749 json
.dump(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesChain
, f
, indent
=2)
1751 EdkLogger
.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain
)
1754 # save the autogenfile and makefile for debug usage
1755 CacheDebugDir
= path
.join(CacheHashDestDir
, "CacheDebug")
1756 CreateDirectory (CacheDebugDir
)
1757 CopyFileOnChange(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakefilePath
, CacheDebugDir
)
1758 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].AutoGenFileList
:
1759 for File
in gDict
[(self
.MetaFile
.Path
, self
.Arch
)].AutoGenFileList
:
1760 CopyFileOnChange(str(File
), CacheDebugDir
)
1764 ## Create makefile for the module and its dependent libraries
1766 # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of
1767 # dependent libraries will be created
1769 @cached_class_function
1770 def CreateMakeFile(self
, CreateLibraryMakeFile
=True, GenFfsList
= []):
1771 gDict
= GlobalData
.gCacheIR
1772 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
1773 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateMakeFileDone
:
1776 # nest this function inside it's only caller.
1777 def CreateTimeStamp():
1778 FileSet
= {self
.MetaFile
.Path
}
1780 for SourceFile
in self
.Module
.Sources
:
1781 FileSet
.add (SourceFile
.Path
)
1783 for Lib
in self
.DependentLibraryList
:
1784 FileSet
.add (Lib
.MetaFile
.Path
)
1786 for f
in self
.AutoGenDepSet
:
1787 FileSet
.add (f
.Path
)
1789 if os
.path
.exists (self
.TimeStampPath
):
1790 os
.remove (self
.TimeStampPath
)
1792 SaveFileOnChange(self
.TimeStampPath
, "\n".join(FileSet
), False)
1794 # Ignore generating makefile when it is a binary module
1795 if self
.IsBinaryModule
:
1798 self
.GenFfsList
= GenFfsList
1800 if not self
.IsLibrary
and CreateLibraryMakeFile
:
1801 for LibraryAutoGen
in self
.LibraryAutoGenList
:
1802 LibraryAutoGen
.CreateMakeFile()
1804 # CanSkip uses timestamps to determine build skipping
1808 if len(self
.CustomMakefile
) == 0:
1809 Makefile
= GenMake
.ModuleMakefile(self
)
1811 Makefile
= GenMake
.CustomMakefile(self
)
1812 if Makefile
.Generate():
1813 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated makefile for module %s [%s]" %
1814 (self
.Name
, self
.Arch
))
1816 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Skipped the generation of makefile for module %s [%s]" %
1817 (self
.Name
, self
.Arch
))
1821 MakefileType
= Makefile
._FileType
1822 MakefileName
= Makefile
._FILE
_NAME
_[MakefileType
]
1823 MakefilePath
= os
.path
.join(self
.MakeFileDir
, MakefileName
)
1825 MewIR
= ModuleBuildCacheIR(self
.MetaFile
.Path
, self
.Arch
)
1826 MewIR
.MakefilePath
= MakefilePath
1827 MewIR
.DependencyHeaderFileSet
= Makefile
.DependencyHeaderFileSet
1828 MewIR
.CreateMakeFileDone
= True
1829 with GlobalData
.cache_lock
:
1831 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
1832 IR
.MakefilePath
= MakefilePath
1833 IR
.DependencyHeaderFileSet
= Makefile
.DependencyHeaderFileSet
1834 IR
.CreateMakeFileDone
= True
1835 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
1837 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = MewIR
def CopyBinaryFiles(self):
    """Copy each pre-built binary of the module into its output directory,
    preserving the file's base name."""
    for File in self.Module.Binaries:
        # NOTE(review): the SrcPath assignment was dropped by the
        # extraction (original line 1841); restored as File.Path.
        SrcPath = File.Path
        DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))
        CopyLongFilePath(SrcPath, DstPath)
1844 ## Create autogen code for the module and its dependent libraries
1846 # @param CreateLibraryCodeFile Flag indicating if or not the code of
1847 # dependent libraries will be created
1849 def CreateCodeFile(self
, CreateLibraryCodeFile
=True):
1850 gDict
= GlobalData
.gCacheIR
1851 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
1852 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateCodeFileDone
:
1855 if self
.IsCodeFileCreated
:
1858 # Need to generate PcdDatabase even PcdDriver is binarymodule
1859 if self
.IsBinaryModule
and self
.PcdIsDriver
!= '':
1860 CreatePcdDatabaseCode(self
, TemplateString(), TemplateString())
1862 if self
.IsBinaryModule
:
1864 self
.CopyBinaryFiles()
1867 if not self
.IsLibrary
and CreateLibraryCodeFile
:
1868 for LibraryAutoGen
in self
.LibraryAutoGenList
:
1869 LibraryAutoGen
.CreateCodeFile()
1871 # CanSkip uses timestamps to determine build skipping
1874 self
.LibraryAutoGenList
1876 IgoredAutoGenList
= []
1878 for File
in self
.AutoGenFileList
:
1879 if GenC
.Generate(File
.Path
, self
.AutoGenFileList
[File
], File
.IsBinary
):
1880 AutoGenList
.append(str(File
))
1882 IgoredAutoGenList
.append(str(File
))
1885 for ModuleType
in self
.DepexList
:
1886 # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
1887 if len(self
.DepexList
[ModuleType
]) == 0 or ModuleType
== SUP_MODULE_USER_DEFINED
or ModuleType
== SUP_MODULE_HOST_APPLICATION
:
1890 Dpx
= GenDepex
.DependencyExpression(self
.DepexList
[ModuleType
], ModuleType
, True)
1891 DpxFile
= gAutoGenDepexFileName
% {"module_name" : self
.Name
}
1893 if len(Dpx
.PostfixNotation
) != 0:
1894 self
.DepexGenerated
= True
1896 if Dpx
.Generate(path
.join(self
.OutputDir
, DpxFile
)):
1897 AutoGenList
.append(str(DpxFile
))
1899 IgoredAutoGenList
.append(str(DpxFile
))
1901 if IgoredAutoGenList
== []:
1902 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated [%s] files for module %s [%s]" %
1903 (" ".join(AutoGenList
), self
.Name
, self
.Arch
))
1904 elif AutoGenList
== []:
1905 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Skipped the generation of [%s] files for module %s [%s]" %
1906 (" ".join(IgoredAutoGenList
), self
.Name
, self
.Arch
))
1908 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated [%s] (skipped %s) files for module %s [%s]" %
1909 (" ".join(AutoGenList
), " ".join(IgoredAutoGenList
), self
.Name
, self
.Arch
))
1911 self
.IsCodeFileCreated
= True
1912 MewIR
= ModuleBuildCacheIR(self
.MetaFile
.Path
, self
.Arch
)
1913 MewIR
.CreateCodeFileDone
= True
1914 with GlobalData
.cache_lock
:
1916 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
1917 IR
.CreateCodeFileDone
= True
1918 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
1920 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = MewIR
1924 ## Summarize the ModuleAutoGen objects of all libraries used by this module
1926 def LibraryAutoGenList(self
):
1928 for Library
in self
.DependentLibraryList
:
1935 self
.PlatformInfo
.MetaFile
,
1939 if La
not in RetVal
:
1941 for Lib
in La
.CodaTargetList
:
1942 self
._ApplyBuildRule
(Lib
.Target
, TAB_UNKNOWN_FILE
)
1945 def GenModuleHash(self
):
1946 # Initialize a dictionary for each arch type
1947 if self
.Arch
not in GlobalData
.gModuleHash
:
1948 GlobalData
.gModuleHash
[self
.Arch
] = {}
1950 # Early exit if module or library has been hashed and is in memory
1951 if self
.Name
in GlobalData
.gModuleHash
[self
.Arch
]:
1952 return GlobalData
.gModuleHash
[self
.Arch
][self
.Name
].encode('utf-8')
1954 # Initialze hash object
1957 # Add Platform level hash
1958 m
.update(GlobalData
.gPlatformHash
.encode('utf-8'))
1960 # Add Package level hash
1961 if self
.DependentPackageList
:
1962 for Pkg
in sorted(self
.DependentPackageList
, key
=lambda x
: x
.PackageName
):
1963 if Pkg
.PackageName
in GlobalData
.gPackageHash
:
1964 m
.update(GlobalData
.gPackageHash
[Pkg
.PackageName
].encode('utf-8'))
1967 if self
.LibraryAutoGenList
:
1968 for Lib
in sorted(self
.LibraryAutoGenList
, key
=lambda x
: x
.Name
):
1969 if Lib
.Name
not in GlobalData
.gModuleHash
[self
.Arch
]:
1971 m
.update(GlobalData
.gModuleHash
[self
.Arch
][Lib
.Name
].encode('utf-8'))
1974 with
open(str(self
.MetaFile
), 'rb') as f
:
1978 # Add Module's source files
1979 if self
.SourceFileList
:
1980 for File
in sorted(self
.SourceFileList
, key
=lambda x
: str(x
)):
1981 f
= open(str(File
), 'rb')
1986 GlobalData
.gModuleHash
[self
.Arch
][self
.Name
] = m
.hexdigest()
1988 return GlobalData
.gModuleHash
[self
.Arch
][self
.Name
].encode('utf-8')
1990 def GenModuleFilesHash(self
, gDict
):
1991 # Early exit if module or library has been hashed and is in memory
1992 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
:
1993 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesChain
:
1994 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
1996 # skip if the module cache already crashed
1997 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
1998 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2001 DependencyFileSet
= set()
2002 # Add Module Meta file
2003 DependencyFileSet
.add(self
.MetaFile
)
2005 # Add Module's source files
2006 if self
.SourceFileList
:
2007 for File
in set(self
.SourceFileList
):
2008 DependencyFileSet
.add(File
)
2010 # Add modules's include header files
2011 # Search dependency file list for each source file
2014 for Target
in self
.IntroTargetList
:
2015 SourceFileList
.extend(Target
.Inputs
)
2016 OutPutFileList
.extend(Target
.Outputs
)
2018 for Item
in OutPutFileList
:
2019 if Item
in SourceFileList
:
2020 SourceFileList
.remove(Item
)
2022 for file_path
in self
.IncludePathList
+ self
.BuildOptionIncPathList
:
2023 # skip the folders in platform BuildDir which are not been generated yet
2024 if file_path
.startswith(os
.path
.abspath(self
.PlatformInfo
.BuildDir
)+os
.sep
):
2026 SearchList
.append(file_path
)
2027 FileDependencyDict
= {}
2028 ForceIncludedFile
= []
2029 for F
in SourceFileList
:
2030 # skip the files which are not been generated yet, because
2031 # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c
2032 if not os
.path
.exists(F
.Path
):
2034 FileDependencyDict
[F
] = GenMake
.GetDependencyList(self
, self
.FileDependCache
, F
, ForceIncludedFile
, SearchList
)
2036 if FileDependencyDict
:
2037 for Dependency
in FileDependencyDict
.values():
2038 DependencyFileSet
.update(set(Dependency
))
2040 # Caculate all above dependency files hash
2041 # Initialze hash object
2044 for File
in sorted(DependencyFileSet
, key
=lambda x
: str(x
)):
2045 if not os
.path
.exists(str(File
)):
2046 EdkLogger
.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File
, self
.MetaFile
.Path
, self
.Arch
))
2048 with
open(str(File
), 'rb') as f
:
2051 FileList
.append((str(File
), hashlib
.md5(Content
).hexdigest()))
2054 MewIR
= ModuleBuildCacheIR(self
.MetaFile
.Path
, self
.Arch
)
2055 MewIR
.ModuleFilesHashDigest
= m
.digest()
2056 MewIR
.ModuleFilesHashHexDigest
= m
.hexdigest()
2057 MewIR
.ModuleFilesChain
= FileList
2058 with GlobalData
.cache_lock
:
2060 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2061 IR
.ModuleFilesHashDigest
= m
.digest()
2062 IR
.ModuleFilesHashHexDigest
= m
.hexdigest()
2063 IR
.ModuleFilesChain
= FileList
2064 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2066 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = MewIR
2068 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2070 def GenPreMakefileHash(self
, gDict
):
2071 # Early exit if module or library has been hashed and is in memory
2072 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
2073 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
:
2074 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2076 # skip if the module cache already crashed
2077 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
2078 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2081 # skip binary module
2082 if self
.IsBinaryModule
:
2085 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2086 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesHashDigest
:
2087 self
.GenModuleFilesHash(gDict
)
2089 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2090 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesHashDigest
:
2091 EdkLogger
.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2094 # Initialze hash object
2097 # Add Platform level hash
2098 if ('PlatformHash') in gDict
:
2099 m
.update(gDict
[('PlatformHash')].encode('utf-8'))
2101 EdkLogger
.quiet("[cache warning]: PlatformHash is missing")
2103 # Add Package level hash
2104 if self
.DependentPackageList
:
2105 for Pkg
in sorted(self
.DependentPackageList
, key
=lambda x
: x
.PackageName
):
2106 if (Pkg
.PackageName
, 'PackageHash') in gDict
:
2107 m
.update(gDict
[(Pkg
.PackageName
, 'PackageHash')].encode('utf-8'))
2109 EdkLogger
.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg
.PackageName
, self
.MetaFile
.Name
, self
.Arch
))
2112 if self
.LibraryAutoGenList
:
2113 for Lib
in sorted(self
.LibraryAutoGenList
, key
=lambda x
: x
.Name
):
2114 if not (Lib
.MetaFile
.Path
, Lib
.Arch
) in gDict
or \
2115 not gDict
[(Lib
.MetaFile
.Path
, Lib
.Arch
)].ModuleFilesHashDigest
:
2116 Lib
.GenPreMakefileHash(gDict
)
2117 m
.update(gDict
[(Lib
.MetaFile
.Path
, Lib
.Arch
)].ModuleFilesHashDigest
)
2120 m
.update(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesHashDigest
)
2122 with GlobalData
.cache_lock
:
2123 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2124 IR
.PreMakefileHashHexDigest
= m
.hexdigest()
2125 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2127 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2129 def GenMakeHeaderFilesHash(self
, gDict
):
2130 # Early exit if module or library has been hashed and is in memory
2131 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
2132 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHeaderFilesHashDigest
:
2133 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2135 # skip if the module cache already crashed
2136 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
2137 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2140 # skip binary module
2141 if self
.IsBinaryModule
:
2144 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2145 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateCodeFileDone
:
2147 if (self
.MetaFile
.File
,self
.MetaFile
.Root
,self
.Arch
,self
.MetaFile
.Path
) in GlobalData
.libConstPcd
:
2148 self
.ConstPcd
= GlobalData
.libConstPcd
[(self
.MetaFile
.File
,self
.MetaFile
.Root
,self
.Arch
,self
.MetaFile
.Path
)]
2149 if (self
.MetaFile
.File
,self
.MetaFile
.Root
,self
.Arch
,self
.MetaFile
.Path
) in GlobalData
.Refes
:
2150 self
.ReferenceModules
= GlobalData
.Refes
[(self
.MetaFile
.File
,self
.MetaFile
.Root
,self
.Arch
,self
.MetaFile
.Path
)]
2151 self
.CreateCodeFile()
2152 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2153 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateMakeFileDone
:
2154 self
.CreateMakeFile(GenFfsList
=GlobalData
.FfsCmd
.get((self
.MetaFile
.Path
, self
.Arch
),[]))
2156 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2157 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateCodeFileDone
or \
2158 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateMakeFileDone
:
2159 EdkLogger
.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2162 DependencyFileSet
= set()
2164 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakefilePath
:
2165 DependencyFileSet
.add(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakefilePath
)
2167 EdkLogger
.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2170 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].DependencyHeaderFileSet
:
2171 for File
in gDict
[(self
.MetaFile
.Path
, self
.Arch
)].DependencyHeaderFileSet
:
2172 DependencyFileSet
.add(File
)
2174 EdkLogger
.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2177 if self
.AutoGenFileList
:
2178 for File
in set(self
.AutoGenFileList
):
2179 DependencyFileSet
.add(File
)
2181 # Caculate all above dependency files hash
2182 # Initialze hash object
2185 for File
in sorted(DependencyFileSet
, key
=lambda x
: str(x
)):
2186 if not os
.path
.exists(str(File
)):
2187 EdkLogger
.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File
, self
.MetaFile
.Path
, self
.Arch
))
2189 f
= open(str(File
), 'rb')
2193 FileList
.append((str(File
), hashlib
.md5(Content
).hexdigest()))
2195 with GlobalData
.cache_lock
:
2196 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2197 IR
.AutoGenFileList
= self
.AutoGenFileList
.keys()
2198 IR
.MakeHeaderFilesHashChain
= FileList
2199 IR
.MakeHeaderFilesHashDigest
= m
.digest()
2200 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2202 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2204 def GenMakeHash(self
, gDict
):
2205 # Early exit if module or library has been hashed and is in memory
2206 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
2207 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
:
2208 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2210 # skip if the module cache already crashed
2211 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
2212 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2215 # skip binary module
2216 if self
.IsBinaryModule
:
2219 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2220 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesHashDigest
:
2221 self
.GenModuleFilesHash(gDict
)
2222 if not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHeaderFilesHashDigest
:
2223 self
.GenMakeHeaderFilesHash(gDict
)
2225 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2226 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesHashDigest
or \
2227 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesChain
or \
2228 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHeaderFilesHashDigest
or \
2229 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHeaderFilesHashChain
:
2230 EdkLogger
.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2233 # Initialze hash object
2237 # Add hash of makefile and dependency header files
2238 m
.update(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHeaderFilesHashDigest
)
2239 New
= list(set(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHeaderFilesHashChain
) - set(MakeHashChain
))
2240 New
.sort(key
=lambda x
: str(x
))
2241 MakeHashChain
+= New
2244 if self
.LibraryAutoGenList
:
2245 for Lib
in sorted(self
.LibraryAutoGenList
, key
=lambda x
: x
.Name
):
2246 if not (Lib
.MetaFile
.Path
, Lib
.Arch
) in gDict
or \
2247 not gDict
[(Lib
.MetaFile
.Path
, Lib
.Arch
)].MakeHashChain
:
2248 Lib
.GenMakeHash(gDict
)
2249 if not gDict
[(Lib
.MetaFile
.Path
, Lib
.Arch
)].MakeHashDigest
:
2250 print("Cannot generate MakeHash for lib module:", Lib
.MetaFile
.Path
, Lib
.Arch
)
2252 m
.update(gDict
[(Lib
.MetaFile
.Path
, Lib
.Arch
)].MakeHashDigest
)
2253 New
= list(set(gDict
[(Lib
.MetaFile
.Path
, Lib
.Arch
)].MakeHashChain
) - set(MakeHashChain
))
2254 New
.sort(key
=lambda x
: str(x
))
2255 MakeHashChain
+= New
2258 m
.update(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesHashDigest
)
2259 New
= list(set(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesChain
) - set(MakeHashChain
))
2260 New
.sort(key
=lambda x
: str(x
))
2261 MakeHashChain
+= New
2263 with GlobalData
.cache_lock
:
2264 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2265 IR
.MakeHashDigest
= m
.digest()
2266 IR
.MakeHashHexDigest
= m
.hexdigest()
2267 IR
.MakeHashChain
= MakeHashChain
2268 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2270 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2272 ## Decide whether we can skip the left autogen and make process
2273 def CanSkipbyPreMakefileCache(self
, gDict
):
2274 if not GlobalData
.gBinCacheSource
:
2277 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakeCacheHit
:
2280 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2283 # If Module is binary, do not skip by cache
2284 if self
.IsBinaryModule
:
2287 # .inc is contains binary information so do not skip by hash as well
2288 for f_ext
in self
.SourceFileList
:
2289 if '.inc' in str(f_ext
):
2292 # Get the module hash values from stored cache and currrent build
2293 # then check whether cache hit based on the hash values
2294 # if cache hit, restore all the files from cache
2295 FileDir
= path
.join(GlobalData
.gBinCacheSource
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
)
2296 FfsDir
= path
.join(GlobalData
.gBinCacheSource
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
)
2298 ModuleHashPairList
= [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2299 ModuleHashPair
= path
.join(FileDir
, self
.Name
+ ".ModuleHashPair")
2300 if not os
.path
.exists(ModuleHashPair
):
2301 EdkLogger
.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair
)
2302 with GlobalData
.cache_lock
:
2303 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2304 IR
.CacheCrash
= True
2305 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2309 with
open(ModuleHashPair
, 'r') as f
:
2310 ModuleHashPairList
= json
.load(f
)
2312 EdkLogger
.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair
)
2315 self
.GenPreMakefileHash(gDict
)
2316 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2317 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
:
2318 EdkLogger
.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2322 CurrentPreMakeHash
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
2323 for idx
, (PreMakefileHash
, MakeHash
) in enumerate (ModuleHashPairList
):
2324 if PreMakefileHash
== CurrentPreMakeHash
:
2325 MakeHashStr
= str(MakeHash
)
2330 TargetHashDir
= path
.join(FileDir
, MakeHashStr
)
2331 TargetFfsHashDir
= path
.join(FfsDir
, MakeHashStr
)
2333 if not os
.path
.exists(TargetHashDir
):
2334 EdkLogger
.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir
)
2337 for root
, dir, files
in os
.walk(TargetHashDir
):
2339 File
= path
.join(root
, f
)
2340 self
.CacheCopyFile(self
.OutputDir
, TargetHashDir
, File
)
2341 if os
.path
.exists(TargetFfsHashDir
):
2342 for root
, dir, files
in os
.walk(TargetFfsHashDir
):
2344 File
= path
.join(root
, f
)
2345 self
.CacheCopyFile(self
.FfsOutputDir
, TargetFfsHashDir
, File
)
2347 if self
.Name
== "PcdPeim" or self
.Name
== "PcdDxe":
2348 CreatePcdDatabaseCode(self
, TemplateString(), TemplateString())
2350 with GlobalData
.cache_lock
:
2351 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2352 IR
.PreMakeCacheHit
= True
2353 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2354 print("[cache hit]: checkpoint_PreMakefile:", self
.MetaFile
.Path
, self
.Arch
)
2355 #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))
2358 ## Decide whether we can skip the make process
2359 def CanSkipbyMakeCache(self
, gDict
):
2360 if not GlobalData
.gBinCacheSource
:
2363 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeCacheHit
:
2366 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2369 # If Module is binary, do not skip by cache
2370 if self
.IsBinaryModule
:
2371 print("[cache miss]: checkpoint_Makefile: binary module:", self
.MetaFile
.Path
, self
.Arch
)
2374 # .inc is contains binary information so do not skip by hash as well
2375 for f_ext
in self
.SourceFileList
:
2376 if '.inc' in str(f_ext
):
2377 with GlobalData
.cache_lock
:
2378 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2379 IR
.MakeCacheHit
= False
2380 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2381 print("[cache miss]: checkpoint_Makefile: .inc module:", self
.MetaFile
.Path
, self
.Arch
)
2384 # Get the module hash values from stored cache and currrent build
2385 # then check whether cache hit based on the hash values
2386 # if cache hit, restore all the files from cache
2387 FileDir
= path
.join(GlobalData
.gBinCacheSource
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
)
2388 FfsDir
= path
.join(GlobalData
.gBinCacheSource
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
)
2390 ModuleHashPairList
= [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2391 ModuleHashPair
= path
.join(FileDir
, self
.Name
+ ".ModuleHashPair")
2392 if not os
.path
.exists(ModuleHashPair
):
2393 EdkLogger
.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair
)
2394 with GlobalData
.cache_lock
:
2395 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2396 IR
.CacheCrash
= True
2397 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2401 with
open(ModuleHashPair
, 'r') as f
:
2402 ModuleHashPairList
= json
.load(f
)
2404 EdkLogger
.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair
)
2407 self
.GenMakeHash(gDict
)
2408 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2409 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
:
2410 EdkLogger
.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2414 CurrentMakeHash
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
2415 for idx
, (PreMakefileHash
, MakeHash
) in enumerate (ModuleHashPairList
):
2416 if MakeHash
== CurrentMakeHash
:
2417 MakeHashStr
= str(MakeHash
)
2420 print("[cache miss]: checkpoint_Makefile:", self
.MetaFile
.Path
, self
.Arch
)
2423 TargetHashDir
= path
.join(FileDir
, MakeHashStr
)
2424 TargetFfsHashDir
= path
.join(FfsDir
, MakeHashStr
)
2425 if not os
.path
.exists(TargetHashDir
):
2426 EdkLogger
.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir
)
2429 for root
, dir, files
in os
.walk(TargetHashDir
):
2431 File
= path
.join(root
, f
)
2432 self
.CacheCopyFile(self
.OutputDir
, TargetHashDir
, File
)
2434 if os
.path
.exists(TargetFfsHashDir
):
2435 for root
, dir, files
in os
.walk(TargetFfsHashDir
):
2437 File
= path
.join(root
, f
)
2438 self
.CacheCopyFile(self
.FfsOutputDir
, TargetFfsHashDir
, File
)
2440 if self
.Name
== "PcdPeim" or self
.Name
== "PcdDxe":
2441 CreatePcdDatabaseCode(self
, TemplateString(), TemplateString())
2442 with GlobalData
.cache_lock
:
2443 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2444 IR
.MakeCacheHit
= True
2445 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2446 print("[cache hit]: checkpoint_Makefile:", self
.MetaFile
.Path
, self
.Arch
)
2449 ## Show the first file name which causes cache miss
2450 def PrintFirstMakeCacheMissFile(self
, gDict
):
2451 if not GlobalData
.gBinCacheSource
:
2454 # skip if the module cache already crashed
2455 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2458 # skip binary module
2459 if self
.IsBinaryModule
:
2462 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
:
2465 # Only print cache miss file for the MakeCache not hit module
2466 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeCacheHit
:
2469 if not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
:
2470 EdkLogger
.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
2473 # Find the cache dir name through the .ModuleHashPair file info
2474 FileDir
= path
.join(GlobalData
.gBinCacheSource
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
)
2476 ModuleHashPairList
= [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2477 ModuleHashPair
= path
.join(FileDir
, self
.Name
+ ".ModuleHashPair")
2478 if not os
.path
.exists(ModuleHashPair
):
2479 EdkLogger
.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
2483 with
open(ModuleHashPair
, 'r') as f
:
2484 ModuleHashPairList
= json
.load(f
)
2486 EdkLogger
.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
2490 for idx
, (PreMakefileHash
, MakeHash
) in enumerate (ModuleHashPairList
):
2491 TargetHashDir
= path
.join(FileDir
, str(MakeHash
))
2492 if os
.path
.exists(TargetHashDir
):
2493 MakeHashSet
.add(MakeHash
)
2495 EdkLogger
.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
2498 TargetHash
= list(MakeHashSet
)[0]
2499 TargetHashDir
= path
.join(FileDir
, str(TargetHash
))
2500 if len(MakeHashSet
) > 1 :
2501 EdkLogger
.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash
, self
.MetaFile
.Path
, self
.Arch
))
2503 ListFile
= path
.join(TargetHashDir
, self
.Name
+ '.MakeHashChain')
2504 if os
.path
.exists(ListFile
):
2506 f
= open(ListFile
, 'r')
2507 CachedList
= json
.load(f
)
2510 EdkLogger
.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile
)
2513 EdkLogger
.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile
)
2516 CurrentList
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
2517 for idx
, (file, hash) in enumerate (CurrentList
):
2518 (filecached
, hashcached
) = CachedList
[idx
]
2519 if file != filecached
:
2520 EdkLogger
.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self
.MetaFile
.Path
, self
.Arch
, file, filecached
))
2522 if hash != hashcached
:
2523 EdkLogger
.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self
.MetaFile
.Path
, self
.Arch
, file))
2528 ## Decide whether we can skip the ModuleAutoGen process
2529 def CanSkipbyCache(self
, gDict
):
2530 # Hashing feature is off
2531 if not GlobalData
.gBinCacheSource
:
2534 if self
in GlobalData
.gBuildHashSkipTracking
:
2535 return GlobalData
.gBuildHashSkipTracking
[self
]
2537 # If library or Module is binary do not skip by hash
2538 if self
.IsBinaryModule
:
2539 GlobalData
.gBuildHashSkipTracking
[self
] = False
2542 # .inc is contains binary information so do not skip by hash as well
2543 for f_ext
in self
.SourceFileList
:
2544 if '.inc' in str(f_ext
):
2545 GlobalData
.gBuildHashSkipTracking
[self
] = False
2548 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
:
2551 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakeCacheHit
:
2552 GlobalData
.gBuildHashSkipTracking
[self
] = True
2555 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeCacheHit
:
2556 GlobalData
.gBuildHashSkipTracking
[self
] = True
2561 ## Decide whether we can skip the ModuleAutoGen process
2562 # If any source file is newer than the module than we cannot skip
2565 # Don't skip if cache feature enabled
2566 if GlobalData
.gUseHashCache
or GlobalData
.gBinCacheDest
or GlobalData
.gBinCacheSource
:
2568 if self
.MakeFileDir
in GlobalData
.gSikpAutoGenCache
:
2570 if not os
.path
.exists(self
.TimeStampPath
):
2572 #last creation time of the module
2573 DstTimeStamp
= os
.stat(self
.TimeStampPath
)[8]
2575 SrcTimeStamp
= self
.Workspace
._SrcTimeStamp
2576 if SrcTimeStamp
> DstTimeStamp
:
2579 with
open(self
.TimeStampPath
,'r') as f
:
2581 source
= source
.rstrip('\n')
2582 if not os
.path
.exists(source
):
2584 if source
not in ModuleAutoGen
.TimeDict
:
2585 ModuleAutoGen
.TimeDict
[source
] = os
.stat(source
)[8]
2586 if ModuleAutoGen
.TimeDict
[source
] > DstTimeStamp
:
2588 GlobalData
.gSikpAutoGenCache
.add(self
.MakeFileDir
)
2592 def TimeStampPath(self
):
2593 return os
.path
.join(self
.MakeFileDir
, 'AutoGenTimeStamp')