2 # Create makefile for MS nmake and GNU make
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
7 from __future__
import absolute_import
8 from AutoGen
.AutoGen
import AutoGen
9 from Common
.LongFilePathSupport
import LongFilePath
, CopyLongFilePath
10 from Common
.BuildToolError
import *
11 from Common
.DataType
import *
12 from Common
.Misc
import *
13 from Common
.StringUtils
import NormPath
,GetSplitList
14 from collections
import defaultdict
15 from Workspace
.WorkspaceCommon
import OrderedListDict
16 import os
.path
as path
19 from . import InfSectionParser
22 from . import GenDepex
23 from io
import BytesIO
24 from GenPatchPcdTable
.GenPatchPcdTable
import parsePcdInfoFromMapFile
25 from Workspace
.MetaFileCommentParser
import UsageList
26 from .GenPcdDb
import CreatePcdDatabaseCode
27 from Common
.caching
import cached_class_function
28 from AutoGen
.ModuleAutoGenHelper
import PlatformInfo
,WorkSpaceInfo
## Mapping of toolchain family to Makefile flavor: MSFT uses nmake, GCC uses gmake.
gMakeTypeMap = {
    TAB_COMPILER_MSFT: "nmake",
    "GCC": "gmake",
}
# Regular expressions for extracting include directories from build flags.
# The difference between MSFT and INTEL/GCC/RVCT is that the former uses /I
# while the latter use -I to specify include directories.
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.DOTALL | re.MULTILINE)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.DOTALL | re.MULTILINE)
## Default file names for AutoGen output artifacts.
#  Names containing %(module_name)s are expanded per module via %-formatting.
gAutoGenCodeFileName       = "AutoGen.c"
gAutoGenHeaderFileName     = "AutoGen.h"
gAutoGenStringFileName     = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName      = "%(module_name)s.depex"
gAutoGenImageDefFileName   = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName        = "%(module_name)sIdf.hpk"
# INF specification version emitted into as-built INF files.
gInfSpecVersion            = "0x00010017"
# Match "name = <identifier>" in an efivarstore statement.
# NOTE: raw strings are required so \s and \w reach the regex engine verbatim
# instead of being parsed as (invalid) Python string escape sequences.
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")

# The format of guid in an efivarstore statement looks like the following and must be correct:
#   guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
62 # Template string to generic AsBuilt INF
64 gAsBuiltInfHeaderString
= TemplateString("""${header_comments}
70 INF_VERSION = ${module_inf_version}
71 BASE_NAME = ${module_name}
72 FILE_GUID = ${module_guid}
73 MODULE_TYPE = ${module_module_type}${BEGIN}
74 VERSION_STRING = ${module_version_string}${END}${BEGIN}
75 PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
76 UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
77 PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
78 ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
79 UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
80 CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
81 DESTRUCTOR = ${module_destructor}${END}${BEGIN}
82 SHADOW = ${module_shadow}${END}${BEGIN}
83 PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
84 PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
85 PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
86 PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
87 BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
88 SPEC = ${module_spec}${END}${BEGIN}
89 UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
90 MODULE_UNI_FILE = ${module_uni_file}${END}
92 [Packages.${module_arch}]${BEGIN}
95 [Binaries.${module_arch}]${BEGIN}
98 [PatchPcd.${module_arch}]${BEGIN}
102 [Protocols.${module_arch}]${BEGIN}
106 [Ppis.${module_arch}]${BEGIN}
110 [Guids.${module_arch}]${BEGIN}
114 [PcdEx.${module_arch}]${BEGIN}
118 [LibraryClasses.${module_arch}]
119 ## @LIB_INSTANCES${BEGIN}
120 # ${libraryclasses_item}${END}
124 ${userextension_tianocore_item}
128 [BuildOptions.${module_arch}]
130 ## ${flags_item}${END}
# Extend the lists contained in one dictionary with the lists stored in another.
# If CopyToDict is not derived from defaultdict(list) this may raise an exception.
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    """Extend each list in CopyToDict with the corresponding list from CopyFromDict.

    CopyToDict is expected to behave like defaultdict(list); a key present only
    in CopyFromDict raises KeyError otherwise.
    """
    for SrcKey, SrcValues in CopyFromDict.items():
        CopyToDict[SrcKey].extend(SrcValues)
140 # Create a directory specified by a set of path elements and return the full path
141 def _MakeDir(PathList
):
142 RetVal
= path
.join(*PathList
)
143 CreateDirectory(RetVal
)
147 # Convert string to C format array
149 def _ConvertStringToByteArray(Value
):
150 Value
= Value
.strip()
154 if not Value
.endswith('}'):
156 Value
= Value
.replace(' ', '').replace('{', '').replace('}', '')
157 ValFields
= Value
.split(',')
159 for Index
in range(len(ValFields
)):
160 ValFields
[Index
] = str(int(ValFields
[Index
], 0))
163 Value
= '{' + ','.join(ValFields
) + '}'
167 if Value
.startswith('L"'):
168 if not Value
.endswith('"'):
172 elif not Value
.startswith('"') or not Value
.endswith('"'):
175 Value
= eval(Value
) # translate escape character
177 for Index
in range(0, len(Value
)):
179 NewValue
= NewValue
+ str(ord(Value
[Index
]) % 0x10000) + ','
181 NewValue
= NewValue
+ str(ord(Value
[Index
]) % 0x100) + ','
182 Value
= NewValue
+ '0}'
185 ## ModuleAutoGen class
# This class encapsulates the AutoGen behaviors for the build tools. In addition to
188 # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
189 # to the [depex] section in module's inf file.
191 class ModuleAutoGen(AutoGen
):
192 # call super().__init__ then call the worker function with different parameter count
193 def __init__(self
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
):
194 if not hasattr(self
, "_Init"):
195 self
._InitWorker
(Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
)
198 ## Cache the timestamps of metafiles of every module in a class attribute
202 def __new__(cls
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
):
203 # check if this module is employed by active platform
204 if not PlatformInfo(Workspace
, args
[0], Target
, Toolchain
, Arch
,args
[-1]).ValidModule(MetaFile
):
205 EdkLogger
.verbose("Module [%s] for [%s] is not employed by active platform\n" \
208 return super(ModuleAutoGen
, cls
).__new
__(cls
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
)
210 ## Initialize ModuleAutoGen
212 # @param Workspace EdkIIWorkspaceBuild object
213 # @param ModuleFile The path of module file
214 # @param Target Build target (DEBUG, RELEASE)
215 # @param Toolchain Name of tool chain
216 # @param Arch The arch the module supports
217 # @param PlatformFile Platform meta-file
219 def _InitWorker(self
, Workspace
, ModuleFile
, Target
, Toolchain
, Arch
, PlatformFile
,DataPipe
):
220 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "AutoGen module [%s] [%s]" % (ModuleFile
, Arch
))
221 GlobalData
.gProcessingFile
= "%s [%s, %s, %s]" % (ModuleFile
, Arch
, Toolchain
, Target
)
223 self
.Workspace
= Workspace
224 self
.WorkspaceDir
= ""
225 self
.PlatformInfo
= None
226 self
.DataPipe
= DataPipe
227 self
.__init
_platform
_info
__()
228 self
.MetaFile
= ModuleFile
229 self
.SourceDir
= self
.MetaFile
.SubDir
230 self
.SourceDir
= mws
.relpath(self
.SourceDir
, self
.WorkspaceDir
)
232 self
.ToolChain
= Toolchain
233 self
.BuildTarget
= Target
235 self
.ToolChainFamily
= self
.PlatformInfo
.ToolChainFamily
236 self
.BuildRuleFamily
= self
.PlatformInfo
.BuildRuleFamily
238 self
.IsCodeFileCreated
= False
239 self
.IsAsBuiltInfCreated
= False
240 self
.DepexGenerated
= False
242 self
.BuildDatabase
= self
.Workspace
.BuildDatabase
243 self
.BuildRuleOrder
= None
246 self
._GuidComments
= OrderedListDict()
247 self
._ProtocolComments
= OrderedListDict()
248 self
._PpiComments
= OrderedListDict()
249 self
._BuildTargets
= None
250 self
._IntroBuildTargetList
= None
251 self
._FinalBuildTargetList
= None
252 self
._FileTypes
= None
254 self
.AutoGenDepSet
= set()
255 self
.ReferenceModules
= []
258 self
.FileDependCache
= {}
260 def __init_platform_info__(self
):
261 pinfo
= self
.DataPipe
.Get("P_Info")
262 self
.WorkspaceDir
= pinfo
.get("WorkspaceDir")
263 self
.PlatformInfo
= PlatformInfo(self
.Workspace
,pinfo
.get("ActivePlatform"),pinfo
.get("Target"),pinfo
.get("ToolChain"),pinfo
.get("Arch"),self
.DataPipe
)
264 ## hash() operator of ModuleAutoGen
266 # The module file path and arch string will be used to represent
267 # hash value of this object
269 # @retval int Hash value of the module file path and arch
271 @cached_class_function
273 return hash((self
.MetaFile
, self
.Arch
, self
.ToolChain
,self
.BuildTarget
))
275 return "%s [%s]" % (self
.MetaFile
, self
.Arch
)
277 # Get FixedAtBuild Pcds of this Module
279 def FixedAtBuildPcds(self
):
281 for Pcd
in self
.ModulePcdList
:
282 if Pcd
.Type
!= TAB_PCDS_FIXED_AT_BUILD
:
284 if Pcd
not in RetVal
:
289 def FixedVoidTypePcds(self
):
291 for Pcd
in self
.FixedAtBuildPcds
:
292 if Pcd
.DatumType
== TAB_VOID
:
293 if '.'.join((Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
)) not in RetVal
:
294 RetVal
['.'.join((Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
))] = Pcd
.DefaultValue
298 def UniqueBaseName(self
):
299 ModuleNames
= self
.DataPipe
.Get("M_Name")
302 return ModuleNames
.get((self
.Name
,self
.MetaFile
),self
.Name
)
304 # Macros could be used in build_rule.txt (also Makefile)
308 ("WORKSPACE" ,self
.WorkspaceDir
),
309 ("MODULE_NAME" ,self
.Name
),
310 ("MODULE_NAME_GUID" ,self
.UniqueBaseName
),
311 ("MODULE_GUID" ,self
.Guid
),
312 ("MODULE_VERSION" ,self
.Version
),
313 ("MODULE_TYPE" ,self
.ModuleType
),
314 ("MODULE_FILE" ,str(self
.MetaFile
)),
315 ("MODULE_FILE_BASE_NAME" ,self
.MetaFile
.BaseName
),
316 ("MODULE_RELATIVE_DIR" ,self
.SourceDir
),
317 ("MODULE_DIR" ,self
.SourceDir
),
318 ("BASE_NAME" ,self
.Name
),
320 ("TOOLCHAIN" ,self
.ToolChain
),
321 ("TOOLCHAIN_TAG" ,self
.ToolChain
),
322 ("TOOL_CHAIN_TAG" ,self
.ToolChain
),
323 ("TARGET" ,self
.BuildTarget
),
324 ("BUILD_DIR" ,self
.PlatformInfo
.BuildDir
),
325 ("BIN_DIR" ,os
.path
.join(self
.PlatformInfo
.BuildDir
, self
.Arch
)),
326 ("LIB_DIR" ,os
.path
.join(self
.PlatformInfo
.BuildDir
, self
.Arch
)),
327 ("MODULE_BUILD_DIR" ,self
.BuildDir
),
328 ("OUTPUT_DIR" ,self
.OutputDir
),
329 ("DEBUG_DIR" ,self
.DebugDir
),
330 ("DEST_DIR_OUTPUT" ,self
.OutputDir
),
331 ("DEST_DIR_DEBUG" ,self
.DebugDir
),
332 ("PLATFORM_NAME" ,self
.PlatformInfo
.Name
),
333 ("PLATFORM_GUID" ,self
.PlatformInfo
.Guid
),
334 ("PLATFORM_VERSION" ,self
.PlatformInfo
.Version
),
335 ("PLATFORM_RELATIVE_DIR" ,self
.PlatformInfo
.SourceDir
),
336 ("PLATFORM_DIR" ,mws
.join(self
.WorkspaceDir
, self
.PlatformInfo
.SourceDir
)),
337 ("PLATFORM_OUTPUT_DIR" ,self
.PlatformInfo
.OutputDir
),
338 ("FFS_OUTPUT_DIR" ,self
.FfsOutputDir
)
341 ## Return the module build data object
344 return self
.BuildDatabase
[self
.MetaFile
, self
.Arch
, self
.BuildTarget
, self
.ToolChain
]
346 ## Return the module name
349 return self
.Module
.BaseName
351 ## Return the module DxsFile if exist
354 return self
.Module
.DxsFile
356 ## Return the module meta-file GUID
360 # To build same module more than once, the module path with FILE_GUID overridden has
361 # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
362 # in DSC. The overridden GUID can be retrieved from file name
364 if os
.path
.basename(self
.MetaFile
.File
) != os
.path
.basename(self
.MetaFile
.Path
):
366 # Length of GUID is 36
368 return os
.path
.basename(self
.MetaFile
.Path
)[:36]
369 return self
.Module
.Guid
371 ## Return the module version
374 return self
.Module
.Version
376 ## Return the module type
378 def ModuleType(self
):
379 return self
.Module
.ModuleType
381 ## Return the component type (for Edk.x style of module)
383 def ComponentType(self
):
384 return self
.Module
.ComponentType
386 ## Return the build type
389 return self
.Module
.BuildType
391 ## Return the PCD_IS_DRIVER setting
393 def PcdIsDriver(self
):
394 return self
.Module
.PcdIsDriver
396 ## Return the autogen version, i.e. module meta-file version
398 def AutoGenVersion(self
):
399 return self
.Module
.AutoGenVersion
401 ## Check if the module is library or not
404 return bool(self
.Module
.LibraryClass
)
406 ## Check if the module is binary module or not
408 def IsBinaryModule(self
):
409 return self
.Module
.IsBinaryModule
411 ## Return the directory to store intermediate files of the module
415 self
.PlatformInfo
.BuildDir
,
418 self
.MetaFile
.BaseName
421 ## Return the directory to store the intermediate object files of the module
424 return _MakeDir((self
.BuildDir
, "OUTPUT"))
426 ## Return the directory path to store ffs file
428 def FfsOutputDir(self
):
429 if GlobalData
.gFdfParser
:
430 return path
.join(self
.PlatformInfo
.BuildDir
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
)
433 ## Return the directory to store auto-gened source files of the module
436 return _MakeDir((self
.BuildDir
, "DEBUG"))
438 ## Return the path of custom file
440 def CustomMakefile(self
):
442 for Type
in self
.Module
.CustomMakefile
:
443 MakeType
= gMakeTypeMap
[Type
] if Type
in gMakeTypeMap
else 'nmake'
444 File
= os
.path
.join(self
.SourceDir
, self
.Module
.CustomMakefile
[Type
])
445 RetVal
[MakeType
] = File
448 ## Return the directory of the makefile
450 # @retval string The directory string of module's makefile
453 def MakeFileDir(self
):
456 ## Return build command string
458 # @retval string Build command string
461 def BuildCommand(self
):
462 return self
.PlatformInfo
.BuildCommand
464 ## Get Module package and Platform package
466 # @retval list The list of package object
469 def PackageList(self
):
471 if self
.Module
.Packages
:
472 PkagList
.extend(self
.Module
.Packages
)
473 Platform
= self
.BuildDatabase
[self
.PlatformInfo
.MetaFile
, self
.Arch
, self
.BuildTarget
, self
.ToolChain
]
474 for Package
in Platform
.Packages
:
475 if Package
in PkagList
:
477 PkagList
.append(Package
)
480 ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on
482 # @retval list The list of package object
485 def DerivedPackageList(self
):
487 PackageList
.extend(self
.PackageList
)
488 for M
in self
.DependentLibraryList
:
489 for Package
in M
.Packages
:
490 if Package
in PackageList
:
492 PackageList
.append(Package
)
495 ## Get the depex string
497 # @return : a string contain all depex expression.
498 def _GetDepexExpresionString(self
):
501 ## DPX_SOURCE IN Define section.
502 if self
.Module
.DxsFile
:
504 for M
in [self
.Module
] + self
.DependentLibraryList
:
505 Filename
= M
.MetaFile
.Path
506 InfObj
= InfSectionParser
.InfSectionParser(Filename
)
507 DepexExpressionList
= InfObj
.GetDepexExpresionList()
508 for DepexExpression
in DepexExpressionList
:
509 for key
in DepexExpression
:
510 Arch
, ModuleType
= key
511 DepexExpr
= [x
for x
in DepexExpression
[key
] if not str(x
).startswith('#')]
512 # the type of build module is USER_DEFINED.
513 # All different DEPEX section tags would be copied into the As Built INF file
514 # and there would be separate DEPEX section tags
515 if self
.ModuleType
.upper() == SUP_MODULE_USER_DEFINED
or self
.ModuleType
.upper() == SUP_MODULE_HOST_APPLICATION
:
516 if (Arch
.upper() == self
.Arch
.upper()) and (ModuleType
.upper() != TAB_ARCH_COMMON
):
517 DepexList
.append({(Arch
, ModuleType
): DepexExpr
})
519 if Arch
.upper() == TAB_ARCH_COMMON
or \
520 (Arch
.upper() == self
.Arch
.upper() and \
521 ModuleType
.upper() in [TAB_ARCH_COMMON
, self
.ModuleType
.upper()]):
522 DepexList
.append({(Arch
, ModuleType
): DepexExpr
})
524 #the type of build module is USER_DEFINED.
525 if self
.ModuleType
.upper() == SUP_MODULE_USER_DEFINED
or self
.ModuleType
.upper() == SUP_MODULE_HOST_APPLICATION
:
526 for Depex
in DepexList
:
528 DepexStr
+= '[Depex.%s.%s]\n' % key
529 DepexStr
+= '\n'.join('# '+ val
for val
in Depex
[key
])
532 return '[Depex.%s]\n' % self
.Arch
535 #the type of build module not is USER_DEFINED.
537 for Depex
in DepexList
:
542 for D
in Depex
.values():
543 DepexStr
+= ' '.join(val
for val
in D
)
544 Index
= DepexStr
.find('END')
545 if Index
> -1 and Index
== len(DepexStr
) - 3:
546 DepexStr
= DepexStr
[:-3]
547 DepexStr
= DepexStr
.strip()
550 DepexStr
= DepexStr
.lstrip('(').rstrip(')').strip()
552 return '[Depex.%s]\n' % self
.Arch
553 return '[Depex.%s]\n# ' % self
.Arch
+ DepexStr
555 ## Merge dependency expression
557 # @retval list The token list of the dependency expression after parsed
561 if self
.DxsFile
or self
.IsLibrary
or TAB_DEPENDENCY_EXPRESSION_FILE
in self
.FileTypes
:
566 # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
568 FixedVoidTypePcds
= {}
569 for M
in [self
] + self
.LibraryAutoGenList
:
570 FixedVoidTypePcds
.update(M
.FixedVoidTypePcds
)
571 for M
in [self
] + self
.LibraryAutoGenList
:
573 for D
in M
.Module
.Depex
[self
.Arch
, self
.ModuleType
]:
575 DepexList
.append('AND')
576 DepexList
.append('(')
577 #replace D with value if D is FixedAtBuild PCD
584 Value
= FixedVoidTypePcds
[item
]
585 if len(Value
.split(',')) != 16:
586 EdkLogger
.error("build", FORMAT_INVALID
,
587 "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item
))
588 NewList
.append(Value
)
590 EdkLogger
.error("build", FORMAT_INVALID
, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item
))
592 DepexList
.extend(NewList
)
593 if DepexList
[-1] == 'END': # no need of a END at this time
595 DepexList
.append(')')
598 EdkLogger
.verbose("DEPEX[%s] (+%s) = %s" % (self
.Name
, M
.Module
.BaseName
, DepexList
))
599 if 'BEFORE' in DepexList
or 'AFTER' in DepexList
:
601 if len(DepexList
) > 0:
602 EdkLogger
.verbose('')
603 return {self
.ModuleType
:DepexList
}
605 ## Merge dependency expression
607 # @retval list The token list of the dependency expression after parsed
610 def DepexExpressionDict(self
):
611 if self
.DxsFile
or self
.IsLibrary
or TAB_DEPENDENCY_EXPRESSION_FILE
in self
.FileTypes
:
614 DepexExpressionString
= ''
# Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
618 for M
in [self
.Module
] + self
.DependentLibraryList
:
620 for D
in M
.DepexExpression
[self
.Arch
, self
.ModuleType
]:
621 if DepexExpressionString
!= '':
622 DepexExpressionString
+= ' AND '
623 DepexExpressionString
+= '('
624 DepexExpressionString
+= D
625 DepexExpressionString
= DepexExpressionString
.rstrip('END').strip()
626 DepexExpressionString
+= ')'
629 EdkLogger
.verbose("DEPEX[%s] (+%s) = %s" % (self
.Name
, M
.BaseName
, DepexExpressionString
))
630 if 'BEFORE' in DepexExpressionString
or 'AFTER' in DepexExpressionString
:
632 if len(DepexExpressionString
) > 0:
633 EdkLogger
.verbose('')
635 return {self
.ModuleType
:DepexExpressionString
}
# Get the TianoCore user extensions; they contain the dependent libraries.
# @retval: a list containing TianoCore user extensions.
640 def _GetTianoCoreUserExtensionList(self
):
641 TianoCoreUserExtentionList
= []
642 for M
in [self
.Module
] + self
.DependentLibraryList
:
643 Filename
= M
.MetaFile
.Path
644 InfObj
= InfSectionParser
.InfSectionParser(Filename
)
645 TianoCoreUserExtenList
= InfObj
.GetUserExtensionTianoCore()
646 for TianoCoreUserExtent
in TianoCoreUserExtenList
:
647 for Section
in TianoCoreUserExtent
:
648 ItemList
= Section
.split(TAB_SPLIT
)
650 if len(ItemList
) == 4:
652 if Arch
.upper() == TAB_ARCH_COMMON
or Arch
.upper() == self
.Arch
.upper():
654 TianoCoreList
.extend([TAB_SECTION_START
+ Section
+ TAB_SECTION_END
])
655 TianoCoreList
.extend(TianoCoreUserExtent
[Section
][:])
656 TianoCoreList
.append('\n')
657 TianoCoreUserExtentionList
.append(TianoCoreList
)
659 return TianoCoreUserExtentionList
661 ## Return the list of specification version required for the module
663 # @retval list The list of specification defined in module file
666 def Specification(self
):
667 return self
.Module
.Specification
669 ## Tool option for the module build
671 # @param PlatformInfo The object of PlatformBuildInfo
672 # @retval dict The dict containing valid options
675 def BuildOption(self
):
676 RetVal
, self
.BuildRuleOrder
= self
.PlatformInfo
.ApplyBuildOption(self
.Module
)
677 if self
.BuildRuleOrder
:
678 self
.BuildRuleOrder
= ['.%s' % Ext
for Ext
in self
.BuildRuleOrder
.split()]
681 ## Get include path list from tool option for the module build
683 # @retval list The include path list
686 def BuildOptionIncPathList(self
):
688 # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
689 # is the former use /I , the Latter used -I to specify include directories
691 if self
.PlatformInfo
.ToolChainFamily
in (TAB_COMPILER_MSFT
):
692 BuildOptIncludeRegEx
= gBuildOptIncludePatternMsft
693 elif self
.PlatformInfo
.ToolChainFamily
in ('INTEL', 'GCC', 'RVCT'):
694 BuildOptIncludeRegEx
= gBuildOptIncludePatternOther
697 # New ToolChainFamily, don't known whether there is option to specify include directories
702 for Tool
in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
704 FlagOption
= self
.BuildOption
[Tool
]['FLAGS']
708 if self
.ToolChainFamily
!= 'RVCT':
709 IncPathList
= [NormPath(Path
, self
.Macros
) for Path
in BuildOptIncludeRegEx
.findall(FlagOption
)]
# RVCT may specify a list of directories separated by commas
715 for Path
in BuildOptIncludeRegEx
.findall(FlagOption
):
716 PathList
= GetSplitList(Path
, TAB_COMMA_SPLIT
)
717 IncPathList
.extend(NormPath(PathEntry
, self
.Macros
) for PathEntry
in PathList
)
720 # EDK II modules must not reference header files outside of the packages they depend on or
721 # within the module's directory tree. Report error if violation.
723 if GlobalData
.gDisableIncludePathCheck
== False:
724 for Path
in IncPathList
:
725 if (Path
not in self
.IncludePathList
) and (CommonPath([Path
, self
.MetaFile
.Dir
]) != self
.MetaFile
.Dir
):
726 ErrMsg
= "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path
, Tool
, FlagOption
)
727 EdkLogger
.error("build",
730 File
=str(self
.MetaFile
))
731 RetVal
+= IncPathList
734 ## Return a list of files which can be built from source
736 # What kind of files can be built is determined by build rules in
737 # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
740 def SourceFileList(self
):
742 ToolChainTagSet
= {"", TAB_STAR
, self
.ToolChain
}
743 ToolChainFamilySet
= {"", TAB_STAR
, self
.ToolChainFamily
, self
.BuildRuleFamily
}
744 for F
in self
.Module
.Sources
:
746 if F
.TagName
not in ToolChainTagSet
:
747 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "The toolchain [%s] for processing file [%s] is found, "
748 "but [%s] is currently used" % (F
.TagName
, str(F
), self
.ToolChain
))
750 # match tool chain family or build rule family
751 if F
.ToolChainFamily
not in ToolChainFamilySet
:
754 "The file [%s] must be built by tools of [%s], " \
755 "but current toolchain family is [%s], buildrule family is [%s]" \
756 % (str(F
), F
.ToolChainFamily
, self
.ToolChainFamily
, self
.BuildRuleFamily
))
759 # add the file path into search path list for file including
760 if F
.Dir
not in self
.IncludePathList
:
761 self
.IncludePathList
.insert(0, F
.Dir
)
764 self
._MatchBuildRuleOrder
(RetVal
)
767 self
._ApplyBuildRule
(F
, TAB_UNKNOWN_FILE
)
770 def _MatchBuildRuleOrder(self
, FileList
):
773 for SingleFile
in FileList
:
774 if self
.BuildRuleOrder
and SingleFile
.Ext
in self
.BuildRuleOrder
and SingleFile
.Ext
in self
.BuildRules
:
775 key
= SingleFile
.Path
.rsplit(SingleFile
.Ext
,1)[0]
776 if key
in Order_Dict
:
777 Order_Dict
[key
].append(SingleFile
.Ext
)
779 Order_Dict
[key
] = [SingleFile
.Ext
]
783 if len(Order_Dict
[F
]) > 1:
784 Order_Dict
[F
].sort(key
=lambda i
: self
.BuildRuleOrder
.index(i
))
785 for Ext
in Order_Dict
[F
][1:]:
786 RemoveList
.append(F
+ Ext
)
788 for item
in RemoveList
:
789 FileList
.remove(item
)
793 ## Return the list of unicode files
795 def UnicodeFileList(self
):
796 return self
.FileTypes
.get(TAB_UNICODE_FILE
,[])
798 ## Return the list of vfr files
800 def VfrFileList(self
):
801 return self
.FileTypes
.get(TAB_VFR_FILE
, [])
803 ## Return the list of Image Definition files
805 def IdfFileList(self
):
806 return self
.FileTypes
.get(TAB_IMAGE_FILE
,[])
808 ## Return a list of files which can be built from binary
810 # "Build" binary files are just to copy them to build directory.
812 # @retval list The list of files which can be built later
815 def BinaryFileList(self
):
817 for F
in self
.Module
.Binaries
:
818 if F
.Target
not in [TAB_ARCH_COMMON
, TAB_STAR
] and F
.Target
!= self
.BuildTarget
:
821 self
._ApplyBuildRule
(F
, F
.Type
, BinaryFileList
=RetVal
)
825 def BuildRules(self
):
827 BuildRuleDatabase
= self
.PlatformInfo
.BuildRule
828 for Type
in BuildRuleDatabase
.FileTypeList
:
829 #first try getting build rule by BuildRuleFamily
830 RuleObject
= BuildRuleDatabase
[Type
, self
.BuildType
, self
.Arch
, self
.BuildRuleFamily
]
832 # build type is always module type, but ...
833 if self
.ModuleType
!= self
.BuildType
:
834 RuleObject
= BuildRuleDatabase
[Type
, self
.ModuleType
, self
.Arch
, self
.BuildRuleFamily
]
835 #second try getting build rule by ToolChainFamily
837 RuleObject
= BuildRuleDatabase
[Type
, self
.BuildType
, self
.Arch
, self
.ToolChainFamily
]
839 # build type is always module type, but ...
840 if self
.ModuleType
!= self
.BuildType
:
841 RuleObject
= BuildRuleDatabase
[Type
, self
.ModuleType
, self
.Arch
, self
.ToolChainFamily
]
844 RuleObject
= RuleObject
.Instantiate(self
.Macros
)
845 RetVal
[Type
] = RuleObject
846 for Ext
in RuleObject
.SourceFileExtList
:
847 RetVal
[Ext
] = RuleObject
850 def _ApplyBuildRule(self
, File
, FileType
, BinaryFileList
=None):
851 if self
._BuildTargets
is None:
852 self
._IntroBuildTargetList
= set()
853 self
._FinalBuildTargetList
= set()
854 self
._BuildTargets
= defaultdict(set)
855 self
._FileTypes
= defaultdict(set)
857 if not BinaryFileList
:
858 BinaryFileList
= self
.BinaryFileList
860 SubDirectory
= os
.path
.join(self
.OutputDir
, File
.SubDir
)
861 if not os
.path
.exists(SubDirectory
):
862 CreateDirectory(SubDirectory
)
868 # Make sure to get build rule order value
872 while Index
< len(SourceList
):
873 Source
= SourceList
[Index
]
877 CreateDirectory(Source
.Dir
)
879 if File
.IsBinary
and File
== Source
and File
in BinaryFileList
:
880 # Skip all files that are not binary libraries
881 if not self
.IsLibrary
:
883 RuleObject
= self
.BuildRules
[TAB_DEFAULT_BINARY_FILE
]
884 elif FileType
in self
.BuildRules
:
885 RuleObject
= self
.BuildRules
[FileType
]
886 elif Source
.Ext
in self
.BuildRules
:
887 RuleObject
= self
.BuildRules
[Source
.Ext
]
889 # stop at no more rules
891 self
._FinalBuildTargetList
.add(LastTarget
)
894 FileType
= RuleObject
.SourceFileType
895 self
._FileTypes
[FileType
].add(Source
)
897 # stop at STATIC_LIBRARY for library
898 if self
.IsLibrary
and FileType
== TAB_STATIC_LIBRARY
:
900 self
._FinalBuildTargetList
.add(LastTarget
)
903 Target
= RuleObject
.Apply(Source
, self
.BuildRuleOrder
)
906 self
._FinalBuildTargetList
.add(LastTarget
)
908 elif not Target
.Outputs
:
909 # Only do build for target with outputs
910 self
._FinalBuildTargetList
.add(Target
)
912 self
._BuildTargets
[FileType
].add(Target
)
914 if not Source
.IsBinary
and Source
== File
:
915 self
._IntroBuildTargetList
.add(Target
)
917 # to avoid cyclic rule
918 if FileType
in RuleChain
:
921 RuleChain
.add(FileType
)
922 SourceList
.extend(Target
.Outputs
)
924 FileType
= TAB_UNKNOWN_FILE
928 if self
._BuildTargets
is None:
929 self
._IntroBuildTargetList
= set()
930 self
._FinalBuildTargetList
= set()
931 self
._BuildTargets
= defaultdict(set)
932 self
._FileTypes
= defaultdict(set)
934 #TRICK: call SourceFileList property to apply build rule for source files
937 #TRICK: call _GetBinaryFileList to apply build rule for binary files
940 return self
._BuildTargets
943 def IntroTargetList(self
):
945 return self
._IntroBuildTargetList
948 def CodaTargetList(self
):
950 return self
._FinalBuildTargetList
955 return self
._FileTypes
957 ## Get the list of package object the module depends on and the Platform depends on
959 # @retval list The package object list
962 def DependentPackageList(self
):
963 return self
.PackageList
965 ## Return the list of auto-generated code file
967 # @retval list The list of auto-generated file
970 def AutoGenFileList(self
):
971 AutoGenUniIdf
= self
.BuildType
!= 'UEFI_HII'
972 UniStringBinBuffer
= BytesIO()
973 IdfGenBinBuffer
= BytesIO()
975 AutoGenC
= TemplateString()
976 AutoGenH
= TemplateString()
977 StringH
= TemplateString()
978 StringIdf
= TemplateString()
979 GenC
.CreateCode(self
, AutoGenC
, AutoGenH
, StringH
, AutoGenUniIdf
, UniStringBinBuffer
, StringIdf
, AutoGenUniIdf
, IdfGenBinBuffer
)
981 # AutoGen.c is generated if there are library classes in inf, or there are object files
983 if str(AutoGenC
) != "" and (len(self
.Module
.LibraryClasses
) > 0
984 or TAB_OBJECT_FILE
in self
.FileTypes
):
985 AutoFile
= PathClass(gAutoGenCodeFileName
, self
.DebugDir
)
986 RetVal
[AutoFile
] = str(AutoGenC
)
987 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
988 if str(AutoGenH
) != "":
989 AutoFile
= PathClass(gAutoGenHeaderFileName
, self
.DebugDir
)
990 RetVal
[AutoFile
] = str(AutoGenH
)
991 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
992 if str(StringH
) != "":
993 AutoFile
= PathClass(gAutoGenStringFileName
% {"module_name":self
.Name
}, self
.DebugDir
)
994 RetVal
[AutoFile
] = str(StringH
)
995 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
996 if UniStringBinBuffer
is not None and UniStringBinBuffer
.getvalue() != b
"":
997 AutoFile
= PathClass(gAutoGenStringFormFileName
% {"module_name":self
.Name
}, self
.OutputDir
)
998 RetVal
[AutoFile
] = UniStringBinBuffer
.getvalue()
999 AutoFile
.IsBinary
= True
1000 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
1001 if UniStringBinBuffer
is not None:
1002 UniStringBinBuffer
.close()
1003 if str(StringIdf
) != "":
1004 AutoFile
= PathClass(gAutoGenImageDefFileName
% {"module_name":self
.Name
}, self
.DebugDir
)
1005 RetVal
[AutoFile
] = str(StringIdf
)
1006 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
1007 if IdfGenBinBuffer
is not None and IdfGenBinBuffer
.getvalue() != b
"":
1008 AutoFile
= PathClass(gAutoGenIdfFileName
% {"module_name":self
.Name
}, self
.OutputDir
)
1009 RetVal
[AutoFile
] = IdfGenBinBuffer
.getvalue()
1010 AutoFile
.IsBinary
= True
1011 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
1012 if IdfGenBinBuffer
is not None:
1013 IdfGenBinBuffer
.close()
1016 ## Return the list of library modules explicitly or implicitly used by this module
1018 def DependentLibraryList(self
):
1019 # only merge library classes and PCD for non-library module
1022 return self
.PlatformInfo
.ApplyLibraryInstance(self
.Module
)
1024 ## Get the list of PCDs from current module
1026 # @retval list The list of PCD
1029 def ModulePcdList(self
):
1030 # apply PCD settings from platform
1031 RetVal
= self
.PlatformInfo
.ApplyPcdSetting(self
.Module
, self
.Module
.Pcds
)
1035 def _PcdComments(self
):
1036 ReVal
= OrderedListDict()
1037 ExtendCopyDictionaryLists(ReVal
, self
.Module
.PcdComments
)
1038 if not self
.IsLibrary
:
1039 for Library
in self
.DependentLibraryList
:
1040 ExtendCopyDictionaryLists(ReVal
, Library
.PcdComments
)
1043 ## Get the list of PCDs from dependent libraries
1045 # @retval list The list of PCD
1048 def LibraryPcdList(self
):
1053 # get PCDs from dependent libraries
1054 for Library
in self
.DependentLibraryList
:
1055 PcdsInLibrary
= OrderedDict()
1056 for Key
in Library
.Pcds
:
1057 # skip duplicated PCDs
1058 if Key
in self
.Module
.Pcds
or Key
in Pcds
:
1061 PcdsInLibrary
[Key
] = copy
.copy(Library
.Pcds
[Key
])
1062 RetVal
.extend(self
.PlatformInfo
.ApplyPcdSetting(self
.Module
, PcdsInLibrary
, Library
=Library
))
1065 ## Get the GUID value mapping
1067 # @retval dict The mapping between GUID cname and its value
1071 RetVal
= self
.Module
.Guids
1072 for Library
in self
.DependentLibraryList
:
1073 RetVal
.update(Library
.Guids
)
1074 ExtendCopyDictionaryLists(self
._GuidComments
, Library
.GuidComments
)
1075 ExtendCopyDictionaryLists(self
._GuidComments
, self
.Module
.GuidComments
)
1079 def GetGuidsUsedByPcd(self
):
1080 RetVal
= OrderedDict(self
.Module
.GetGuidsUsedByPcd())
1081 for Library
in self
.DependentLibraryList
:
1082 RetVal
.update(Library
.GetGuidsUsedByPcd())
1084 ## Get the protocol value mapping
1086 # @retval dict The mapping between protocol cname and its value
1089 def ProtocolList(self
):
1090 RetVal
= OrderedDict(self
.Module
.Protocols
)
1091 for Library
in self
.DependentLibraryList
:
1092 RetVal
.update(Library
.Protocols
)
1093 ExtendCopyDictionaryLists(self
._ProtocolComments
, Library
.ProtocolComments
)
1094 ExtendCopyDictionaryLists(self
._ProtocolComments
, self
.Module
.ProtocolComments
)
1097 ## Get the PPI value mapping
1099 # @retval dict The mapping between PPI cname and its value
1103 RetVal
= OrderedDict(self
.Module
.Ppis
)
1104 for Library
in self
.DependentLibraryList
:
1105 RetVal
.update(Library
.Ppis
)
1106 ExtendCopyDictionaryLists(self
._PpiComments
, Library
.PpiComments
)
1107 ExtendCopyDictionaryLists(self
._PpiComments
, self
.Module
.PpiComments
)
1110 ## Get the list of include search path
1112 # @retval list The list path
1115 def IncludePathList(self
):
1117 RetVal
.append(self
.MetaFile
.Dir
)
1118 RetVal
.append(self
.DebugDir
)
1120 for Package
in self
.PackageList
:
1121 PackageDir
= mws
.join(self
.WorkspaceDir
, Package
.MetaFile
.Dir
)
1122 if PackageDir
not in RetVal
:
1123 RetVal
.append(PackageDir
)
1124 IncludesList
= Package
.Includes
1125 if Package
._PrivateIncludes
:
1126 if not self
.MetaFile
.OriginalPath
.Path
.startswith(PackageDir
):
1127 IncludesList
= list(set(Package
.Includes
).difference(set(Package
._PrivateIncludes
)))
1128 for Inc
in IncludesList
:
1129 if Inc
not in RetVal
:
1130 RetVal
.append(str(Inc
))
1131 RetVal
.extend(self
.IncPathFromBuildOptions
)
1135 def IncPathFromBuildOptions(self
):
1137 for tool
in self
.BuildOption
:
1138 if 'FLAGS' in self
.BuildOption
[tool
]:
1139 flags
= self
.BuildOption
[tool
]['FLAGS']
1141 for flag
in flags
.split(" "):
1143 if flag
.startswith(("/I","-I")):
1145 if os
.path
.exists(flag
[2:]):
1146 IncPathList
.append(flag
[2:])
1150 if whitespace
and flag
:
1151 if os
.path
.exists(flag
):
1152 IncPathList
.append(flag
)
def IncludePathLength(self):
    """Return the total character length of the include search paths.

    Each entry in self.IncludePathList contributes its own length plus
    one extra character (the separator/space that joins it onto the
    compiler command line).

    Returns:
        int: sum of ``len(path) + 1`` over every include path.
    """
    total = 0
    for entry in self.IncludePathList:
        total += len(entry) + 1
    return total
1160 ## Get the list of include paths from the packages
1162 # @IncludesList list The list path
1165 def PackageIncludePathList(self
):
1167 for Package
in self
.PackageList
:
1168 PackageDir
= mws
.join(self
.WorkspaceDir
, Package
.MetaFile
.Dir
)
1169 IncludesList
= Package
.Includes
1170 if Package
._PrivateIncludes
:
1171 if not self
.MetaFile
.Path
.startswith(PackageDir
):
1172 IncludesList
= list(set(Package
.Includes
).difference(set(Package
._PrivateIncludes
)))
1175 ## Get HII EX PCDs which maybe used by VFR
1177 # efivarstore used by VFR may relate with HII EX PCDs
1178 # Get the variable name and GUID from efivarstore and HII EX PCD
1179 # List the HII EX PCDs in As Built INF if both name and GUID match.
1181 # @retval list HII EX PCDs
1183 def _GetPcdsMaybeUsedByVfr(self
):
1184 if not self
.SourceFileList
:
1188 for SrcFile
in self
.SourceFileList
:
1189 if SrcFile
.Ext
.lower() != '.vfr':
1191 Vfri
= os
.path
.join(self
.OutputDir
, SrcFile
.BaseName
+ '.i')
1192 if not os
.path
.exists(Vfri
):
1194 VfriFile
= open(Vfri
, 'r')
1195 Content
= VfriFile
.read()
1197 Pos
= Content
.find('efivarstore')
1200 # Make sure 'efivarstore' is the start of efivarstore statement
1201 # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
1204 while Index
>= 0 and Content
[Index
] in ' \t\r\n':
1206 if Index
>= 0 and Content
[Index
] != ';':
1207 Pos
= Content
.find('efivarstore', Pos
+ len('efivarstore'))
1210 # 'efivarstore' must be followed by name and guid
1212 Name
= gEfiVarStoreNamePattern
.search(Content
, Pos
)
1215 Guid
= gEfiVarStoreGuidPattern
.search(Content
, Pos
)
1218 NameArray
= _ConvertStringToByteArray('L"' + Name
.group(1) + '"')
1219 NameGuids
.add((NameArray
, GuidStructureStringToGuidString(Guid
.group(1))))
1220 Pos
= Content
.find('efivarstore', Name
.end())
1224 for Pcd
in self
.PlatformInfo
.Pcds
.values():
1225 if Pcd
.Type
!= TAB_PCDS_DYNAMIC_EX_HII
:
1227 for SkuInfo
in Pcd
.SkuInfoList
.values():
1228 Value
= GuidValue(SkuInfo
.VariableGuid
, self
.PlatformInfo
.PackageList
, self
.MetaFile
.Path
)
1231 Name
= _ConvertStringToByteArray(SkuInfo
.VariableName
)
1232 Guid
= GuidStructureStringToGuidString(Value
)
1233 if (Name
, Guid
) in NameGuids
and Pcd
not in HiiExPcds
:
1234 HiiExPcds
.append(Pcd
)
1239 def _GenOffsetBin(self
):
1241 for SourceFile
in self
.Module
.Sources
:
1242 if SourceFile
.Type
.upper() == ".VFR" :
1244 # search the .map file to find the offset of vfr binary in the PE32+/TE file.
1246 VfrUniBaseName
[SourceFile
.BaseName
] = (SourceFile
.BaseName
+ "Bin")
1247 elif SourceFile
.Type
.upper() == ".UNI" :
1249 # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
1251 VfrUniBaseName
["UniOffsetName"] = (self
.Name
+ "Strings")
1253 if not VfrUniBaseName
:
1255 MapFileName
= os
.path
.join(self
.OutputDir
, self
.Name
+ ".map")
1256 EfiFileName
= os
.path
.join(self
.OutputDir
, self
.Name
+ ".efi")
1257 VfrUniOffsetList
= GetVariableOffset(MapFileName
, EfiFileName
, list(VfrUniBaseName
.values()))
1258 if not VfrUniOffsetList
:
1261 OutputName
= '%sOffset.bin' % self
.Name
1262 UniVfrOffsetFileName
= os
.path
.join( self
.OutputDir
, OutputName
)
1265 fInputfile
= open(UniVfrOffsetFileName
, "wb+", 0)
1267 EdkLogger
.error("build", FILE_OPEN_FAILURE
, "File open failed for %s" % UniVfrOffsetFileName
, None)
1269 # Use a instance of BytesIO to cache data
1270 fStringIO
= BytesIO()
1272 for Item
in VfrUniOffsetList
:
1273 if (Item
[0].find("Strings") != -1):
1275 # UNI offset in image.
1277 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
1279 UniGuid
= b
'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
1280 fStringIO
.write(UniGuid
)
1281 UniValue
= pack ('Q', int (Item
[1], 16))
1282 fStringIO
.write (UniValue
)
1285 # VFR binary offset in image.
1287 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
1289 VfrGuid
= b
'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
1290 fStringIO
.write(VfrGuid
)
1291 VfrValue
= pack ('Q', int (Item
[1], 16))
1292 fStringIO
.write (VfrValue
)
1294 # write data into file.
1297 fInputfile
.write (fStringIO
.getvalue())
1299 EdkLogger
.error("build", FILE_WRITE_FAILURE
, "Write data to file %s failed, please check whether the "
1300 "file been locked or using by other applications." %UniVfrOffsetFileName
, None)
1307 def OutputFile(self
):
1310 for Root
, Dirs
, Files
in os
.walk(self
.BuildDir
):
1312 # lib file is already added through above CodaTargetList, skip it here
1313 if not (File
.lower().endswith('.obj') or File
.lower().endswith('.debug')):
1314 NewFile
= path
.join(Root
, File
)
1317 for Root
, Dirs
, Files
in os
.walk(self
.FfsOutputDir
):
1319 NewFile
= path
.join(Root
, File
)
1324 ## Create AsBuilt INF file the module
1326 def CreateAsBuiltInf(self
):
1328 if self
.IsAsBuiltInfCreated
:
1331 # Skip INF file generation for libraries
1335 # Skip the following code for modules with no source files
1336 if not self
.SourceFileList
:
1339 # Skip the following code for modules without any binary files
1340 if self
.BinaryFileList
:
1343 ### TODO: How to handles mixed source and binary modules
1345 # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
1346 # Also find all packages that the DynamicEx PCDs depend on
1351 PcdTokenSpaceList
= []
1352 for Pcd
in self
.ModulePcdList
+ self
.LibraryPcdList
:
1353 if Pcd
.Type
== TAB_PCDS_PATCHABLE_IN_MODULE
:
1354 PatchablePcds
.append(Pcd
)
1355 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_PATCHABLE_IN_MODULE
))
1356 elif Pcd
.Type
in PCD_DYNAMIC_EX_TYPE_SET
:
1359 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC_EX
))
1360 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC
))
1361 PcdTokenSpaceList
.append(Pcd
.TokenSpaceGuidCName
)
1362 GuidList
= OrderedDict(self
.GuidList
)
1363 for TokenSpace
in self
.GetGuidsUsedByPcd
:
1364 # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
1365 # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
1366 if TokenSpace
not in PcdTokenSpaceList
and TokenSpace
in GuidList
:
1367 GuidList
.pop(TokenSpace
)
1368 CheckList
= (GuidList
, self
.PpiList
, self
.ProtocolList
, PcdCheckList
)
1369 for Package
in self
.DerivedPackageList
:
1370 if Package
in Packages
:
1372 BeChecked
= (Package
.Guids
, Package
.Ppis
, Package
.Protocols
, Package
.Pcds
)
1374 for Index
in range(len(BeChecked
)):
1375 for Item
in CheckList
[Index
]:
1376 if Item
in BeChecked
[Index
]:
1377 Packages
.append(Package
)
1383 VfrPcds
= self
._GetPcdsMaybeUsedByVfr
()
1384 for Pkg
in self
.PlatformInfo
.PackageList
:
1387 for VfrPcd
in VfrPcds
:
1388 if ((VfrPcd
.TokenCName
, VfrPcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC_EX
) in Pkg
.Pcds
or
1389 (VfrPcd
.TokenCName
, VfrPcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC
) in Pkg
.Pcds
):
1390 Packages
.append(Pkg
)
1393 ModuleType
= SUP_MODULE_DXE_DRIVER
if self
.ModuleType
== SUP_MODULE_UEFI_DRIVER
and self
.DepexGenerated
else self
.ModuleType
1394 DriverType
= self
.PcdIsDriver
if self
.PcdIsDriver
else ''
1396 MDefs
= self
.Module
.Defines
1399 'module_name' : self
.Name
,
1400 'module_guid' : Guid
,
1401 'module_module_type' : ModuleType
,
1402 'module_version_string' : [MDefs
['VERSION_STRING']] if 'VERSION_STRING' in MDefs
else [],
1403 'pcd_is_driver_string' : [],
1404 'module_uefi_specification_version' : [],
1405 'module_pi_specification_version' : [],
1406 'module_entry_point' : self
.Module
.ModuleEntryPointList
,
1407 'module_unload_image' : self
.Module
.ModuleUnloadImageList
,
1408 'module_constructor' : self
.Module
.ConstructorList
,
1409 'module_destructor' : self
.Module
.DestructorList
,
1410 'module_shadow' : [MDefs
['SHADOW']] if 'SHADOW' in MDefs
else [],
1411 'module_pci_vendor_id' : [MDefs
['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs
else [],
1412 'module_pci_device_id' : [MDefs
['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs
else [],
1413 'module_pci_class_code' : [MDefs
['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs
else [],
1414 'module_pci_revision' : [MDefs
['PCI_REVISION']] if 'PCI_REVISION' in MDefs
else [],
1415 'module_build_number' : [MDefs
['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs
else [],
1416 'module_spec' : [MDefs
['SPEC']] if 'SPEC' in MDefs
else [],
1417 'module_uefi_hii_resource_section' : [MDefs
['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs
else [],
1418 'module_uni_file' : [MDefs
['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs
else [],
1419 'module_arch' : self
.Arch
,
1420 'package_item' : [Package
.MetaFile
.File
.replace('\\', '/') for Package
in Packages
],
1422 'patchablepcd_item' : [],
1424 'protocol_item' : [],
1428 'libraryclasses_item' : []
1431 if 'MODULE_UNI_FILE' in MDefs
:
1432 UNIFile
= os
.path
.join(self
.MetaFile
.Dir
, MDefs
['MODULE_UNI_FILE'])
1433 if os
.path
.isfile(UNIFile
):
1434 shutil
.copy2(UNIFile
, self
.OutputDir
)
1436 if self
.AutoGenVersion
> int(gInfSpecVersion
, 0):
1437 AsBuiltInfDict
['module_inf_version'] = '0x%08x' % self
.AutoGenVersion
1439 AsBuiltInfDict
['module_inf_version'] = gInfSpecVersion
1442 AsBuiltInfDict
['pcd_is_driver_string'].append(DriverType
)
1444 if 'UEFI_SPECIFICATION_VERSION' in self
.Specification
:
1445 AsBuiltInfDict
['module_uefi_specification_version'].append(self
.Specification
['UEFI_SPECIFICATION_VERSION'])
1446 if 'PI_SPECIFICATION_VERSION' in self
.Specification
:
1447 AsBuiltInfDict
['module_pi_specification_version'].append(self
.Specification
['PI_SPECIFICATION_VERSION'])
1449 OutputDir
= self
.OutputDir
.replace('\\', '/').strip('/')
1450 DebugDir
= self
.DebugDir
.replace('\\', '/').strip('/')
1451 for Item
in self
.CodaTargetList
:
1452 File
= Item
.Target
.Path
.replace('\\', '/').strip('/').replace(DebugDir
, '').replace(OutputDir
, '').strip('/')
1453 if os
.path
.isabs(File
):
1454 File
= File
.replace('\\', '/').strip('/').replace(OutputDir
, '').strip('/')
1455 if Item
.Target
.Ext
.lower() == '.aml':
1456 AsBuiltInfDict
['binary_item'].append('ASL|' + File
)
1457 elif Item
.Target
.Ext
.lower() == '.acpi':
1458 AsBuiltInfDict
['binary_item'].append('ACPI|' + File
)
1459 elif Item
.Target
.Ext
.lower() == '.efi':
1460 AsBuiltInfDict
['binary_item'].append('PE32|' + self
.Name
+ '.efi')
1462 AsBuiltInfDict
['binary_item'].append('BIN|' + File
)
1463 if not self
.DepexGenerated
:
1464 DepexFile
= os
.path
.join(self
.OutputDir
, self
.Name
+ '.depex')
1465 if os
.path
.exists(DepexFile
):
1466 self
.DepexGenerated
= True
1467 if self
.DepexGenerated
:
1468 if self
.ModuleType
in [SUP_MODULE_PEIM
]:
1469 AsBuiltInfDict
['binary_item'].append('PEI_DEPEX|' + self
.Name
+ '.depex')
1470 elif self
.ModuleType
in [SUP_MODULE_DXE_DRIVER
, SUP_MODULE_DXE_RUNTIME_DRIVER
, SUP_MODULE_DXE_SAL_DRIVER
, SUP_MODULE_UEFI_DRIVER
]:
1471 AsBuiltInfDict
['binary_item'].append('DXE_DEPEX|' + self
.Name
+ '.depex')
1472 elif self
.ModuleType
in [SUP_MODULE_DXE_SMM_DRIVER
]:
1473 AsBuiltInfDict
['binary_item'].append('SMM_DEPEX|' + self
.Name
+ '.depex')
1475 Bin
= self
._GenOffsetBin
()
1477 AsBuiltInfDict
['binary_item'].append('BIN|%s' % Bin
)
1479 for Root
, Dirs
, Files
in os
.walk(OutputDir
):
1481 if File
.lower().endswith('.pdb'):
1482 AsBuiltInfDict
['binary_item'].append('DISPOSABLE|' + File
)
1483 HeaderComments
= self
.Module
.HeaderComments
1485 for Index
in range(len(HeaderComments
)):
1486 if HeaderComments
[Index
].find('@BinaryHeader') != -1:
1487 HeaderComments
[Index
] = HeaderComments
[Index
].replace('@BinaryHeader', '@file')
1490 AsBuiltInfDict
['header_comments'] = '\n'.join(HeaderComments
[StartPos
:]).replace(':#', '://')
1491 AsBuiltInfDict
['tail_comments'] = '\n'.join(self
.Module
.TailComments
)
1494 (self
.ProtocolList
, self
._ProtocolComments
, 'protocol_item'),
1495 (self
.PpiList
, self
._PpiComments
, 'ppi_item'),
1496 (GuidList
, self
._GuidComments
, 'guid_item')
1498 for Item
in GenList
:
1499 for CName
in Item
[0]:
1500 Comments
= '\n '.join(Item
[1][CName
]) if CName
in Item
[1] else ''
1501 Entry
= Comments
+ '\n ' + CName
if Comments
else CName
1502 AsBuiltInfDict
[Item
[2]].append(Entry
)
1503 PatchList
= parsePcdInfoFromMapFile(
1504 os
.path
.join(self
.OutputDir
, self
.Name
+ '.map'),
1505 os
.path
.join(self
.OutputDir
, self
.Name
+ '.efi')
1508 for Pcd
in PatchablePcds
:
1509 TokenCName
= Pcd
.TokenCName
1510 for PcdItem
in GlobalData
.MixedPcd
:
1511 if (Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
) in GlobalData
.MixedPcd
[PcdItem
]:
1512 TokenCName
= PcdItem
[0]
1514 for PatchPcd
in PatchList
:
1515 if TokenCName
== PatchPcd
[0]:
1520 if Pcd
.DatumType
== 'BOOLEAN':
1521 BoolValue
= Pcd
.DefaultValue
.upper()
1522 if BoolValue
== 'TRUE':
1523 Pcd
.DefaultValue
= '1'
1524 elif BoolValue
== 'FALSE':
1525 Pcd
.DefaultValue
= '0'
1527 if Pcd
.DatumType
in TAB_PCD_NUMERIC_TYPES
:
1528 HexFormat
= '0x%02x'
1529 if Pcd
.DatumType
== TAB_UINT16
:
1530 HexFormat
= '0x%04x'
1531 elif Pcd
.DatumType
== TAB_UINT32
:
1532 HexFormat
= '0x%08x'
1533 elif Pcd
.DatumType
== TAB_UINT64
:
1534 HexFormat
= '0x%016x'
1535 PcdValue
= HexFormat
% int(Pcd
.DefaultValue
, 0)
1537 if Pcd
.MaxDatumSize
is None or Pcd
.MaxDatumSize
== '':
1538 EdkLogger
.error("build", AUTOGEN_ERROR
,
1539 "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1541 ArraySize
= int(Pcd
.MaxDatumSize
, 0)
1542 PcdValue
= Pcd
.DefaultValue
1543 if PcdValue
[0] != '{':
1545 if PcdValue
[0] == 'L':
1547 PcdValue
= PcdValue
.lstrip('L')
1548 PcdValue
= eval(PcdValue
)
1550 for Index
in range(0, len(PcdValue
)):
1552 CharVal
= ord(PcdValue
[Index
])
1553 NewValue
= NewValue
+ '0x%02x' % (CharVal
& 0x00FF) + ', ' \
1554 + '0x%02x' % (CharVal
>> 8) + ', '
1556 NewValue
= NewValue
+ '0x%02x' % (ord(PcdValue
[Index
]) % 0x100) + ', '
1559 Padding
= Padding
* 2
1560 ArraySize
= ArraySize
// 2
1561 if ArraySize
< (len(PcdValue
) + 1):
1562 if Pcd
.MaxSizeUserSet
:
1563 EdkLogger
.error("build", AUTOGEN_ERROR
,
1564 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1567 ArraySize
= len(PcdValue
) + 1
1568 if ArraySize
> len(PcdValue
) + 1:
1569 NewValue
= NewValue
+ Padding
* (ArraySize
- len(PcdValue
) - 1)
1570 PcdValue
= NewValue
+ Padding
.strip().rstrip(',') + '}'
1571 elif len(PcdValue
.split(',')) <= ArraySize
:
1572 PcdValue
= PcdValue
.rstrip('}') + ', 0x00' * (ArraySize
- len(PcdValue
.split(',')))
1575 if Pcd
.MaxSizeUserSet
:
1576 EdkLogger
.error("build", AUTOGEN_ERROR
,
1577 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1580 ArraySize
= len(PcdValue
) + 1
1581 PcdItem
= '%s.%s|%s|0x%X' % \
1582 (Pcd
.TokenSpaceGuidCName
, TokenCName
, PcdValue
, PatchPcd
[1])
1584 if (Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
) in self
._PcdComments
:
1585 PcdComments
= '\n '.join(self
._PcdComments
[Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
])
1587 PcdItem
= PcdComments
+ '\n ' + PcdItem
1588 AsBuiltInfDict
['patchablepcd_item'].append(PcdItem
)
1590 for Pcd
in Pcds
+ VfrPcds
:
1593 TokenCName
= Pcd
.TokenCName
1594 for PcdItem
in GlobalData
.MixedPcd
:
1595 if (Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
) in GlobalData
.MixedPcd
[PcdItem
]:
1596 TokenCName
= PcdItem
[0]
1598 if Pcd
.Type
== TAB_PCDS_DYNAMIC_EX_HII
:
1599 for SkuName
in Pcd
.SkuInfoList
:
1600 SkuInfo
= Pcd
.SkuInfoList
[SkuName
]
1601 HiiInfo
= '## %s|%s|%s' % (SkuInfo
.VariableName
, SkuInfo
.VariableGuid
, SkuInfo
.VariableOffset
)
1603 if (Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
) in self
._PcdComments
:
1604 PcdCommentList
= self
._PcdComments
[Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
][:]
1608 for Index
, Comment
in enumerate(PcdCommentList
):
1609 for Usage
in UsageList
:
1610 if Comment
.find(Usage
) != -1:
1614 if UsageIndex
!= -1:
1615 PcdCommentList
[UsageIndex
] = '## %s %s %s' % (UsageStr
, HiiInfo
, PcdCommentList
[UsageIndex
].replace(UsageStr
, ''))
1617 PcdCommentList
.append('## UNDEFINED ' + HiiInfo
)
1618 PcdComments
= '\n '.join(PcdCommentList
)
1619 PcdEntry
= Pcd
.TokenSpaceGuidCName
+ '.' + TokenCName
1621 PcdEntry
= PcdComments
+ '\n ' + PcdEntry
1622 AsBuiltInfDict
['pcd_item'].append(PcdEntry
)
1623 for Item
in self
.BuildOption
:
1624 if 'FLAGS' in self
.BuildOption
[Item
]:
1625 AsBuiltInfDict
['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self
.ToolChainFamily
, self
.BuildTarget
, self
.ToolChain
, self
.Arch
, Item
, self
.BuildOption
[Item
]['FLAGS'].strip()))
1627 # Generated LibraryClasses section in comments.
1628 for Library
in self
.LibraryAutoGenList
:
1629 AsBuiltInfDict
['libraryclasses_item'].append(Library
.MetaFile
.File
.replace('\\', '/'))
1631 # Generated UserExtensions TianoCore section.
1632 # All tianocore user extensions are copied.
1634 for TianoCore
in self
._GetTianoCoreUserExtensionList
():
1635 UserExtStr
+= '\n'.join(TianoCore
)
1636 ExtensionFile
= os
.path
.join(self
.MetaFile
.Dir
, TianoCore
[1])
1637 if os
.path
.isfile(ExtensionFile
):
1638 shutil
.copy2(ExtensionFile
, self
.OutputDir
)
1639 AsBuiltInfDict
['userextension_tianocore_item'] = UserExtStr
1641 # Generated depex expression section in comments.
1642 DepexExpression
= self
._GetDepexExpresionString
()
1643 AsBuiltInfDict
['depexsection_item'] = DepexExpression
if DepexExpression
else ''
1645 AsBuiltInf
= TemplateString()
1646 AsBuiltInf
.Append(gAsBuiltInfHeaderString
.Replace(AsBuiltInfDict
))
1648 SaveFileOnChange(os
.path
.join(self
.OutputDir
, self
.Name
+ '.inf'), str(AsBuiltInf
), False)
1650 self
.IsAsBuiltInfCreated
= True
1652 def CacheCopyFile(self
, DestDir
, SourceDir
, File
):
1653 if os
.path
.isdir(File
):
1656 sub_dir
= os
.path
.relpath(File
, SourceDir
)
1657 destination_file
= os
.path
.join(DestDir
, sub_dir
)
1658 destination_dir
= os
.path
.dirname(destination_file
)
1659 CreateDirectory(destination_dir
)
1661 CopyFileOnChange(File
, destination_dir
)
1663 EdkLogger
.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File
, destination_dir
))
1666 def CopyModuleToCache(self
):
1667 # Find the MakeHashStr and PreMakeHashStr from latest MakeHashFileList
1668 # and PreMakeHashFileList files
1670 PreMakeHashStr
= None
1672 PreMakeTimeStamp
= 0
1673 Files
= [f
for f
in os
.listdir(LongFilePath(self
.BuildDir
)) if path
.isfile(LongFilePath(path
.join(self
.BuildDir
, f
)))]
1675 if ".MakeHashFileList." in File
:
1676 #find lastest file through time stamp
1677 FileTimeStamp
= os
.stat(LongFilePath(path
.join(self
.BuildDir
, File
)))[8]
1678 if FileTimeStamp
> MakeTimeStamp
:
1679 MakeTimeStamp
= FileTimeStamp
1680 MakeHashStr
= File
.split('.')[-1]
1681 if len(MakeHashStr
) != 32:
1682 EdkLogger
.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File
))
1683 if ".PreMakeHashFileList." in File
:
1684 FileTimeStamp
= os
.stat(LongFilePath(path
.join(self
.BuildDir
, File
)))[8]
1685 if FileTimeStamp
> PreMakeTimeStamp
:
1686 PreMakeTimeStamp
= FileTimeStamp
1687 PreMakeHashStr
= File
.split('.')[-1]
1688 if len(PreMakeHashStr
) != 32:
1689 EdkLogger
.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File
))
1692 EdkLogger
.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1694 if not PreMakeHashStr
:
1695 EdkLogger
.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1698 # Create Cache destination dirs
1699 FileDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
)
1700 FfsDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
)
1701 CacheFileDir
= path
.join(FileDir
, MakeHashStr
)
1702 CacheFfsDir
= path
.join(FfsDir
, MakeHashStr
)
1703 CreateDirectory (CacheFileDir
)
1704 CreateDirectory (CacheFfsDir
)
1706 # Create ModuleHashPair file to support multiple version cache together
1707 ModuleHashPair
= path
.join(FileDir
, self
.Name
+ ".ModuleHashPair")
1708 ModuleHashPairList
= [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
1709 if os
.path
.exists(ModuleHashPair
):
1710 with
open(ModuleHashPair
, 'r') as f
:
1711 ModuleHashPairList
= json
.load(f
)
1712 if not (PreMakeHashStr
, MakeHashStr
) in set(map(tuple, ModuleHashPairList
)):
1713 ModuleHashPairList
.insert(0, (PreMakeHashStr
, MakeHashStr
))
1714 with
open(ModuleHashPair
, 'w') as f
:
1715 json
.dump(ModuleHashPairList
, f
, indent
=2)
1717 # Copy files to Cache destination dirs
1718 if not self
.OutputFile
:
1719 Ma
= self
.BuildDatabase
[self
.MetaFile
, self
.Arch
, self
.BuildTarget
, self
.ToolChain
]
1720 self
.OutputFile
= Ma
.Binaries
1721 for File
in self
.OutputFile
:
1722 if File
.startswith(os
.path
.abspath(self
.FfsOutputDir
)+os
.sep
):
1723 self
.CacheCopyFile(CacheFfsDir
, self
.FfsOutputDir
, File
)
1725 if self
.Name
+ ".autogen.hash." in File
or \
1726 self
.Name
+ ".autogen.hashchain." in File
or \
1727 self
.Name
+ ".hash." in File
or \
1728 self
.Name
+ ".hashchain." in File
or \
1729 self
.Name
+ ".PreMakeHashFileList." in File
or \
1730 self
.Name
+ ".MakeHashFileList." in File
:
1731 self
.CacheCopyFile(FileDir
, self
.BuildDir
, File
)
1733 self
.CacheCopyFile(CacheFileDir
, self
.BuildDir
, File
)
1734 ## Create makefile for the module and its dependent libraries
1736 # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of
1737 # dependent libraries will be created
1739 @cached_class_function
1740 def CreateMakeFile(self
, CreateLibraryMakeFile
=True, GenFfsList
= []):
1742 # nest this function inside it's only caller.
def CreateTimeStamp():
    """Write the module's makefile time-stamp dependency file.

    Gathers every file whose change should invalidate this module's
    makefile: the module's own meta file, all of its source files, the
    meta files of all dependent libraries, and the AutoGen dependency
    set.  The collected paths are written (one per line) to
    self.TimeStampPath via SaveFileOnChange, after removing any
    pre-existing stamp file.
    """
    # Seed the set with the module's own INF path.
    FileSet = {self.MetaFile.Path}
    # Source files of the module itself.
    FileSet.update(SourceFile.Path for SourceFile in self.Module.Sources)
    # Meta files of every dependent library.
    FileSet.update(Lib.MetaFile.Path for Lib in self.DependentLibraryList)
    # AutoGen-phase dependencies.
    FileSet.update(f.Path for f in self.AutoGenDepSet)
    # Remove a stale stamp before writing the fresh one.
    if os.path.exists(self.TimeStampPath):
        os.remove(self.TimeStampPath)
    SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)
1760 # Ignore generating makefile when it is a binary module
1761 if self
.IsBinaryModule
:
1764 self
.GenFfsList
= GenFfsList
1766 if not self
.IsLibrary
and CreateLibraryMakeFile
:
1767 for LibraryAutoGen
in self
.LibraryAutoGenList
:
1768 LibraryAutoGen
.CreateMakeFile()
1770 # CanSkip uses timestamps to determine build skipping
1774 if len(self
.CustomMakefile
) == 0:
1775 Makefile
= GenMake
.ModuleMakefile(self
)
1777 Makefile
= GenMake
.CustomMakefile(self
)
1778 if Makefile
.Generate():
1779 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated makefile for module %s [%s]" %
1780 (self
.Name
, self
.Arch
))
1782 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Skipped the generation of makefile for module %s [%s]" %
1783 (self
.Name
, self
.Arch
))
1787 MakefileType
= Makefile
._FileType
1788 MakefileName
= Makefile
._FILE
_NAME
_[MakefileType
]
1789 MakefilePath
= os
.path
.join(self
.MakeFileDir
, MakefileName
)
1790 FilePath
= path
.join(self
.BuildDir
, self
.Name
+ ".makefile")
1791 SaveFileOnChange(FilePath
, MakefilePath
, False)
1793 def CopyBinaryFiles(self
):
1794 for File
in self
.Module
.Binaries
:
1796 DstPath
= os
.path
.join(self
.OutputDir
, os
.path
.basename(SrcPath
))
1797 CopyLongFilePath(SrcPath
, DstPath
)
1798 ## Create autogen code for the module and its dependent libraries
1800 # @param CreateLibraryCodeFile Flag indicating if or not the code of
1801 # dependent libraries will be created
1803 def CreateCodeFile(self
, CreateLibraryCodeFile
=True):
1805 if self
.IsCodeFileCreated
:
1808 # Need to generate PcdDatabase even PcdDriver is binarymodule
1809 if self
.IsBinaryModule
and self
.PcdIsDriver
!= '':
1810 CreatePcdDatabaseCode(self
, TemplateString(), TemplateString())
1812 if self
.IsBinaryModule
:
1814 self
.CopyBinaryFiles()
1817 if not self
.IsLibrary
and CreateLibraryCodeFile
:
1818 for LibraryAutoGen
in self
.LibraryAutoGenList
:
1819 LibraryAutoGen
.CreateCodeFile()
1821 # CanSkip uses timestamps to determine build skipping
1824 self
.LibraryAutoGenList
1826 IgoredAutoGenList
= []
1828 for File
in self
.AutoGenFileList
:
1829 if GenC
.Generate(File
.Path
, self
.AutoGenFileList
[File
], File
.IsBinary
):
1830 AutoGenList
.append(str(File
))
1832 IgoredAutoGenList
.append(str(File
))
1835 for ModuleType
in self
.DepexList
:
1836 # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
1837 if len(self
.DepexList
[ModuleType
]) == 0 or ModuleType
== SUP_MODULE_USER_DEFINED
or ModuleType
== SUP_MODULE_HOST_APPLICATION
:
1840 Dpx
= GenDepex
.DependencyExpression(self
.DepexList
[ModuleType
], ModuleType
, True)
1841 DpxFile
= gAutoGenDepexFileName
% {"module_name" : self
.Name
}
1843 if len(Dpx
.PostfixNotation
) != 0:
1844 self
.DepexGenerated
= True
1846 if Dpx
.Generate(path
.join(self
.OutputDir
, DpxFile
)):
1847 AutoGenList
.append(str(DpxFile
))
1849 IgoredAutoGenList
.append(str(DpxFile
))
1851 if IgoredAutoGenList
== []:
1852 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated [%s] files for module %s [%s]" %
1853 (" ".join(AutoGenList
), self
.Name
, self
.Arch
))
1854 elif AutoGenList
== []:
1855 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Skipped the generation of [%s] files for module %s [%s]" %
1856 (" ".join(IgoredAutoGenList
), self
.Name
, self
.Arch
))
1858 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated [%s] (skipped %s) files for module %s [%s]" %
1859 (" ".join(AutoGenList
), " ".join(IgoredAutoGenList
), self
.Name
, self
.Arch
))
1861 self
.IsCodeFileCreated
= True
1865 ## Summarize the ModuleAutoGen objects of all libraries used by this module
1867 def LibraryAutoGenList(self
):
1869 for Library
in self
.DependentLibraryList
:
1876 self
.PlatformInfo
.MetaFile
,
1880 if La
not in RetVal
:
1882 for Lib
in La
.CodaTargetList
:
1883 self
._ApplyBuildRule
(Lib
.Target
, TAB_UNKNOWN_FILE
)
1886 def GenCMakeHash(self
):
1887 # GenCMakeHash can only be called in --binary-destination
1888 # Never called in multiprocessing and always directly save result in main process,
1889 # so no need remote dict to share the gCMakeHashFile result with main process
1891 DependencyFileSet
= set()
1893 if self
.AutoGenFileList
:
1894 for File
in set(self
.AutoGenFileList
):
1895 DependencyFileSet
.add(File
)
1898 abspath
= path
.join(self
.BuildDir
, self
.Name
+ ".makefile")
1900 with
open(LongFilePath(abspath
),"r") as fd
:
1901 lines
= fd
.readlines()
1902 except Exception as e
:
1903 EdkLogger
.error("build",FILE_NOT_FOUND
, "%s doesn't exist" % abspath
, ExtraData
=str(e
), RaiseError
=False)
1905 DependencyFileSet
.update(lines
)
1907 # Caculate all above dependency files hash
1908 # Initialze hash object
1911 for File
in sorted(DependencyFileSet
, key
=lambda x
: str(x
)):
1912 if not path
.exists(LongFilePath(str(File
))):
1913 EdkLogger
.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File
, self
.MetaFile
.Path
, self
.Arch
))
1915 with
open(LongFilePath(str(File
)), 'rb') as f
:
1918 FileList
.append((str(File
), hashlib
.md5(Content
).hexdigest()))
1920 HashChainFile
= path
.join(self
.BuildDir
, self
.Name
+ ".autogen.hashchain." + m
.hexdigest())
1921 GlobalData
.gCMakeHashFile
[(self
.MetaFile
.Path
, self
.Arch
)] = HashChainFile
1923 with
open(LongFilePath(HashChainFile
), 'w') as f
:
1924 json
.dump(FileList
, f
, indent
=2)
1926 EdkLogger
.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile
)
def GenModuleHash(self):
    """Hash this module's meta file, source files and header dependencies,
    then save the (file, md5) chain as <Name>.hashchain.<md5> in BuildDir and
    record that path in GlobalData.gModuleHashFile.

    NOTE(review): the incoming source text was corrupted — several statements
    (try/except headers, `continue`s, md5/FileList initialization,
    `Content = f.read()`) were dropped. The missing control flow has been
    reinstated to match the visible fragments; verify against upstream
    BaseTools before relying on exact behavior.
    """
    # GenModuleHash only called after autogen phase
    # Never called in multiprocessing and always directly save result in main process,
    # so no need remote dict to share the gModuleHashFile result with main process
    #
    # GenPreMakefileHashList consume no dict.
    # GenPreMakefileHashList produce local gModuleHashFile dict.

    DependencyFileSet = set()
    # Add Module Meta file
    DependencyFileSet.add(self.MetaFile.Path)

    # Add Module's source files
    if self.SourceFileList:
        for File in set(self.SourceFileList):
            DependencyFileSet.add(File.Path)

    # Add modules's include header files
    # Directly use the deps.txt file in the module BuildDir
    abspath = path.join(self.BuildDir, "deps.txt")
    rt = None
    try:
        with open(LongFilePath(abspath), "r") as fd:
            lines = fd.readlines()
        if lines:
            rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])
    except Exception as e:
        # best-effort: log and continue without the header list
        EdkLogger.error("build", FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
    if rt:
        DependencyFileSet.update(rt)

    # Caculate all above dependency files hash
    # Initialze hash object
    FileList = []
    m = hashlib.md5()
    BuildDirStr = path.abspath(self.BuildDir).lower()
    for File in sorted(DependencyFileSet, key=lambda x: str(x)):
        # Skip the AutoGen files in BuildDir which already been
        # included in .autogen.hash. file
        if BuildDirStr in path.abspath(File).lower():
            continue
        if not path.exists(LongFilePath(File)):
            EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
            continue
        with open(LongFilePath(File), 'rb') as f:
            Content = f.read()
        m.update(Content)
        FileList.append((File, hashlib.md5(Content).hexdigest()))

    HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())
    GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
    try:
        with open(LongFilePath(HashChainFile), 'w') as f:
            json.dump(FileList, f, indent=2)
    except Exception:
        EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
def GenPreMakefileHashList(self):
    """Collect the platform, package, module and library hash-chain files,
    hash the collected paths and save the list as
    <Name>.PreMakeHashFileList.<md5> in BuildDir.

    NOTE(review): reconstructed from corrupted source (dropped `return`s,
    `continue`s and try/except headers reinstated) — verify against upstream.
    """
    # GenPreMakefileHashList consume below dicts:
    #     gPlatformHashFile, gPackageHashFile, gModuleHashFile
    # GenPreMakefileHashList produce no dict.
    # gModuleHashFile items might be produced in multiprocessing, so
    # need check gModuleHashFile remote dict

    # skip binary module
    if self.IsBinaryModule:
        return

    FileList = []
    m = hashlib.md5()

    # Add Platform level hash
    HashFile = GlobalData.gPlatformHashFile
    if path.exists(LongFilePath(HashFile)):
        FileList.append(HashFile)
        m.update(HashFile.encode('utf-8'))
    else:
        EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)

    # Add Package level hash
    if self.DependentPackageList:
        for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
            if not (Pkg.PackageName, Pkg.Arch) in GlobalData.gPackageHashFile:
                EdkLogger.quiet("[cache warning]:No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))
                continue
            HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]
            if path.exists(LongFilePath(HashFile)):
                FileList.append(HashFile)
                m.update(HashFile.encode('utf-8'))
            else:
                EdkLogger.quiet("[cache warning]:No Package HashFile: %s" % HashFile)

    # Add Module self hash.
    # GenPreMakefileHashList needed in both --binary-destination
    # and --hash. And --hash might save ModuleHashFile in remote dict
    # during multiprocessing.
    # Fix: only consume HashFile when the lookup succeeded; the original
    # logged "[cache error]" and then fell through, re-hashing the stale
    # HashFile left over from the package loop above.
    if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
        HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
        if path.exists(LongFilePath(HashFile)):
            FileList.append(HashFile)
            m.update(HashFile.encode('utf-8'))
        else:
            EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)
    else:
        EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))

    # Add Library hash (same stale-HashFile fix as above)
    if self.LibraryAutoGenList:
        for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
            if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
                HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
                if path.exists(LongFilePath(HashFile)):
                    FileList.append(HashFile)
                    m.update(HashFile.encode('utf-8'))
                else:
                    EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)
            else:
                EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))

    # Save PreMakeHashFileList
    FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())
    try:
        with open(LongFilePath(FilePath), 'w') as f:
            json.dump(FileList, f, indent=0)
    except Exception:
        EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)
def GenMakefileHashList(self):
    """Collect the AutoGen, module and library hash-chain files, hash the
    collected paths and save the list as <Name>.MakeHashFileList.<md5> in
    BuildDir.

    NOTE(review): reconstructed from corrupted source (dropped `return`s and
    try/except headers reinstated) — verify against upstream.
    """
    # GenMakefileHashList only need in --binary-destination which will
    # everything in local dict. So don't need check remote dict.

    # skip binary module
    if self.IsBinaryModule:
        return

    FileList = []
    m = hashlib.md5()

    # Add AutoGen hash
    HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]
    if path.exists(LongFilePath(HashFile)):
        FileList.append(HashFile)
        m.update(HashFile.encode('utf-8'))
    else:
        EdkLogger.quiet("[cache warning]:No AutoGen HashFile: %s" % HashFile)

    # Add Module hash.
    # Fix: only consume HashFile when the lookup succeeded; the original
    # logged "[cache error]" and then fell through, re-hashing the stale
    # HashFile from the AutoGen section above.
    if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
        HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
        if path.exists(LongFilePath(HashFile)):
            FileList.append(HashFile)
            m.update(HashFile.encode('utf-8'))
        else:
            EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)
    else:
        EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))

    # Add Library hash (same stale-HashFile fix as above)
    if self.LibraryAutoGenList:
        for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
            if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
                HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
                if path.exists(LongFilePath(HashFile)):
                    FileList.append(HashFile)
                    m.update(HashFile.encode('utf-8'))
                else:
                    EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)
            else:
                EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))

    # Save MakeHashFileList
    FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())
    try:
        with open(LongFilePath(FilePath), 'w') as f:
            json.dump(FileList, f, indent=0)
    except Exception:
        EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)
def CheckHashChainFile(self, HashChainFile):
    """Return True when every (SrcFile, SrcHash) pair recorded in
    HashChainFile still matches the current md5 of SrcFile; False on any
    mismatch, missing source, bad file name format or unreadable JSON.

    Per-file md5 results are memoized in GlobalData.gFileHashDict so each
    source is hashed at most once per build.

    NOTE(review): reconstructed from corrupted source (dropped try/except
    headers and `return` statements reinstated) — verify against upstream.
    """
    # Assume the HashChainFile basename format is the 'x.hashchain.16BytesHexStr'
    # The x is module name and the 16BytesHexStr is md5 hexdigest of
    # all hashchain files content
    HashStr = HashChainFile.split('.')[-1]
    if len(HashStr) != 32:
        # Fix: the original formatted the undefined name `File` here, which
        # raised NameError instead of logging; report the actual argument.
        EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % (HashChainFile))
        return False

    try:
        with open(LongFilePath(HashChainFile), 'r') as f:
            HashChainList = json.load(f)
    except Exception:
        EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)
        return False

    # Print the different file info
    # print(HashChainFile)
    for idx, (SrcFile, SrcHash) in enumerate(HashChainList):
        if SrcFile in GlobalData.gFileHashDict:
            DestHash = GlobalData.gFileHashDict[SrcFile]
        else:
            try:
                with open(LongFilePath(SrcFile), 'rb') as f:
                    Content = f.read()
                DestHash = hashlib.md5(Content).hexdigest()
                GlobalData.gFileHashDict[SrcFile] = DestHash
            except IOError as X:
                # cache miss if SrcFile is removed in new version code
                GlobalData.gFileHashDict[SrcFile] = 0
                EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
                return False

        if SrcHash != DestHash:
            EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
            return False

    return True
## Decide whether we can skip the left autogen and make process
def CanSkipbyMakeCache(self):
    """Return True and restore the cached build result when this module's
    make-phase hash chains match the binary cache under gBinCacheSource.

    The decision is memoized in GlobalData.gModuleMakeCacheStatus.

    NOTE(review): reconstructed from corrupted source — dropped `return`s,
    `continue`s, `HashMiss` bookkeeping and the inner `for f in files` loops
    were reinstated from the visible fragments; verify against upstream.
    """
    # For --binary-source only
    # CanSkipbyMakeCache consume below dicts:
    #     gModuleMakeCacheStatus, gHashChainStatus
    # GenPreMakefileHashList produce gModuleMakeCacheStatus, gModuleHashFile dict.
    # all these dicts might be produced in multiprocessing, so
    # need check these remote dict

    if not GlobalData.gBinCacheSource:
        return False

    if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:
        return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]

    # If Module is binary, which has special build rule, do not skip by cache.
    if self.IsBinaryModule:
        print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
        GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
        return False

    # see .inc as binary file, do not skip by hash
    for f_ext in self.SourceFileList:
        if '.inc' in str(f_ext):
            print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
            GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False

    ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)

    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
    try:
        with open(LongFilePath(ModuleHashPair), 'r') as f:
            ModuleHashPairList = json.load(f)
    except Exception:
        # ModuleHashPair might not exist for new added module
        GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
        EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
        print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
        return False

    # Check the PreMakeHash in ModuleHashPairList one by one
    for idx, (PreMakefileHash, MakeHash) in enumerate(ModuleHashPairList):
        SourceHashDir = path.join(ModuleCacheDir, MakeHash)
        SourceFfsHashDir = path.join(FfsDir, MakeHash)
        PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
        MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)

        try:
            with open(LongFilePath(MakeHashFileList_FilePah), 'r') as f:
                MakeHashFileList = json.load(f)
        except Exception:
            EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePah)
            continue

        HashMiss = False
        for HashChainFile in MakeHashFileList:
            HashChainStatus = None
            if HashChainFile in GlobalData.gHashChainStatus:
                HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
            if HashChainStatus == False:
                HashMiss = True
                break
            elif HashChainStatus == True:
                continue
            # Convert to path start with cache source dir
            RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
            NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
            if self.CheckHashChainFile(NewFilePath):
                GlobalData.gHashChainStatus[HashChainFile] = True
                # Save the module self HashFile for GenPreMakefileHashList later usage
                if self.Name + ".hashchain." in HashChainFile:
                    GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
            else:
                GlobalData.gHashChainStatus[HashChainFile] = False
                HashMiss = True
                break

        if HashMiss:
            continue

        # PreMakefile cache hit, restore the module build result
        for root, dir, files in os.walk(SourceHashDir):
            for f in files:
                File = path.join(root, f)
                self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
        if os.path.exists(SourceFfsHashDir):
            for root, dir, files in os.walk(SourceFfsHashDir):
                for f in files:
                    File = path.join(root, f)
                    self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)

        if self.Name == "PcdPeim" or self.Name == "PcdDxe":
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())

        print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)
        GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
        return True

    print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
    GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
    return False
## Decide whether we can skip the left autogen and make process
def CanSkipbyPreMakeCache(self):
    """Return True and (when reading from a binary cache) restore the cached
    build result when this module's pre-make hash chains all match.

    Two paths: --hash-only incremental build checks the newest local
    PreMakeHashFileList in BuildDir; otherwise the list comes from the
    binary cache under gBinCacheSource. The decision is memoized in
    GlobalData.gModulePreMakeCacheStatus.

    NOTE(review): reconstructed from corrupted source — dropped `return`s,
    `continue`s, `MakeTimeStamp = 0` init, `HashMiss` bookkeeping and the
    inner `for f in files` loops were reinstated from the visible fragments;
    verify against upstream.
    """
    # CanSkipbyPreMakeCache consume below dicts:
    #     gModulePreMakeCacheStatus, gHashChainStatus, gModuleHashFile
    # GenPreMakefileHashList produce gModulePreMakeCacheStatus dict.
    # all these dicts might be produced in multiprocessing, so
    # need check these remote dicts

    if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:
        return False

    if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:
        return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]

    # If Module is binary, which has special build rule, do not skip by cache.
    if self.IsBinaryModule:
        print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
        GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
        return False

    # see .inc as binary file, do not skip by hash
    for f_ext in self.SourceFileList:
        if '.inc' in str(f_ext):
            print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False

    # For --hash only in the incremental build
    if not GlobalData.gBinCacheSource:
        Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]
        PreMakeHashFileList_FilePah = None
        MakeTimeStamp = 0
        # Find latest PreMakeHashFileList file in self.BuildDir folder
        for File in Files:
            if ".PreMakeHashFileList." in File:
                FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]
                if FileTimeStamp > MakeTimeStamp:
                    MakeTimeStamp = FileTimeStamp
                    PreMakeHashFileList_FilePah = File
        if not PreMakeHashFileList_FilePah:
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False

        try:
            with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:
                PreMakeHashFileList = json.load(f)
        except Exception:
            EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
            print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False

        HashMiss = False
        for HashChainFile in PreMakeHashFileList:
            HashChainStatus = None
            if HashChainFile in GlobalData.gHashChainStatus:
                HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
            if HashChainStatus == False:
                HashMiss = True
                break
            elif HashChainStatus == True:
                continue
            if self.CheckHashChainFile(HashChainFile):
                GlobalData.gHashChainStatus[HashChainFile] = True
                # Save the module self HashFile for GenPreMakefileHashList later usage
                if self.Name + ".hashchain." in HashChainFile:
                    GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
            else:
                GlobalData.gHashChainStatus[HashChainFile] = False
                HashMiss = True
                break

        if HashMiss:
            print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False

        print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
        GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
        return True

    ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)

    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
    try:
        with open(LongFilePath(ModuleHashPair), 'r') as f:
            ModuleHashPairList = json.load(f)
    except Exception:
        # ModuleHashPair might not exist for new added module
        GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
        EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
        print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
        return False

    # Check the PreMakeHash in ModuleHashPairList one by one
    for idx, (PreMakefileHash, MakeHash) in enumerate(ModuleHashPairList):
        SourceHashDir = path.join(ModuleCacheDir, MakeHash)
        SourceFfsHashDir = path.join(FfsDir, MakeHash)
        PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
        MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)

        try:
            with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:
                PreMakeHashFileList = json.load(f)
        except Exception:
            EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
            continue

        HashMiss = False
        for HashChainFile in PreMakeHashFileList:
            HashChainStatus = None
            if HashChainFile in GlobalData.gHashChainStatus:
                HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
            if HashChainStatus == False:
                HashMiss = True
                break
            elif HashChainStatus == True:
                continue
            # Convert to path start with cache source dir
            RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
            NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
            if self.CheckHashChainFile(NewFilePath):
                GlobalData.gHashChainStatus[HashChainFile] = True
            else:
                GlobalData.gHashChainStatus[HashChainFile] = False
                HashMiss = True
                break

        if HashMiss:
            continue

        # PreMakefile cache hit, restore the module build result
        for root, dir, files in os.walk(SourceHashDir):
            for f in files:
                File = path.join(root, f)
                self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
        if os.path.exists(SourceFfsHashDir):
            for root, dir, files in os.walk(SourceFfsHashDir):
                for f in files:
                    File = path.join(root, f)
                    self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)

        if self.Name == "PcdPeim" or self.Name == "PcdDxe":
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())

        print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
        GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
        return True

    print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
    GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
    return False
## Decide whether we can skip the Module build
def CanSkipbyCache(self, gHitSet):
    """Return True when binary-cache reading is enabled and this module is
    already in the cache-hit set gHitSet.

    NOTE(review): the body after the gBinCacheSource guard was dropped in the
    corrupted source; the membership check below is reconstructed — confirm
    against upstream BaseTools.
    """
    # Hashing feature is off
    if not GlobalData.gBinCacheSource:
        return False

    if self in gHitSet:
        return True

    return False
## Decide whether we can skip the ModuleAutoGen process
#  If any source file is newer than the module than we cannot skip
#
def CanSkip(self):
    """Return True when AutoGen can be skipped: no cache feature is active
    and no recorded source is newer than the module's AutoGenTimeStamp.

    NOTE(review): the `def` line itself was dropped in the corrupted source;
    the name CanSkip and the reinstated `return`s are taken from the visible
    body fragments — confirm against upstream BaseTools.
    """
    # Don't skip if cache feature enabled
    if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
        return False
    if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
        return True
    if not os.path.exists(self.TimeStampPath):
        return False
    # last creation time of the module
    DstTimeStamp = os.stat(self.TimeStampPath)[8]

    SrcTimeStamp = self.Workspace._SrcTimeStamp
    if SrcTimeStamp > DstTimeStamp:
        return False

    with open(self.TimeStampPath, 'r') as f:
        for source in f:
            source = source.rstrip('\n')
            if not os.path.exists(source):
                return False
            # memoize per-source mtimes across modules in the class-level dict
            if source not in ModuleAutoGen.TimeDict:
                ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
            if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
                return False
    GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
    return True
def TimeStampPath(self):
    """Return the path of this module's AutoGenTimeStamp file in MakeFileDir.

    NOTE(review): sibling code reads self.TimeStampPath WITHOUT calling it,
    which suggests an attribute-caching decorator (the file imports
    cached_class_function) was dropped from this definition in the corrupted
    source — confirm and restore it against upstream BaseTools.
    """
    return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')