2 # Create makefile for MS nmake and GNU make
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
7 from __future__
import absolute_import
8 from AutoGen
.AutoGen
import AutoGen
9 from Common
.LongFilePathSupport
import CopyLongFilePath
10 from Common
.BuildToolError
import *
11 from Common
.DataType
import *
12 from Common
.Misc
import *
13 from Common
.StringUtils
import NormPath
,GetSplitList
14 from collections
import defaultdict
15 from Workspace
.WorkspaceCommon
import OrderedListDict
16 import os
.path
as path
19 from . import InfSectionParser
22 from . import GenDepex
23 from io
import BytesIO
24 from GenPatchPcdTable
.GenPatchPcdTable
import parsePcdInfoFromMapFile
25 from Workspace
.MetaFileCommentParser
import UsageList
26 from .GenPcdDb
import CreatePcdDatabaseCode
27 from Common
.caching
import cached_class_function
28 from AutoGen
.ModuleAutoGenHelper
import PlatformInfo
,WorkSpaceInfo
29 from AutoGen
.CacheIR
import ModuleBuildCacheIR
## Mapping Makefile type: MSFT-family toolchains get "nmake", GCC-family get "gmake"
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
# Regular expression for finding include directories; the difference between MSFT and INTEL/GCC/RVCT
# is that the former uses /I while the latter use -I to specify include directories
# Capture the path following an /I flag (MSFT-style include directory option)
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
# Capture the path following a -I flag (INTEL/GCC/RVCT-style include directory option)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
## default file name for AutoGen
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
# %(module_name)s placeholders are substituted with the module's name via the % operator
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
# INF specification version string constant
gInfSpecVersion = "0x00010017"
# Match "name = <identifier>" in an efivarstore statement.
# Raw-string literals: \s and \w are regex escapes, not string escapes
# (non-raw form raises a DeprecationWarning and will become a SyntaxError).
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")

# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
63 # Template string to generic AsBuilt INF
65 gAsBuiltInfHeaderString
= TemplateString("""${header_comments}
71 INF_VERSION = ${module_inf_version}
72 BASE_NAME = ${module_name}
73 FILE_GUID = ${module_guid}
74 MODULE_TYPE = ${module_module_type}${BEGIN}
75 VERSION_STRING = ${module_version_string}${END}${BEGIN}
76 PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
77 UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
78 PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
79 ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
80 UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
81 CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
82 DESTRUCTOR = ${module_destructor}${END}${BEGIN}
83 SHADOW = ${module_shadow}${END}${BEGIN}
84 PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
85 PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
86 PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
87 PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
88 BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
89 SPEC = ${module_spec}${END}${BEGIN}
90 UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
91 MODULE_UNI_FILE = ${module_uni_file}${END}
93 [Packages.${module_arch}]${BEGIN}
96 [Binaries.${module_arch}]${BEGIN}
99 [PatchPcd.${module_arch}]${BEGIN}
103 [Protocols.${module_arch}]${BEGIN}
107 [Ppis.${module_arch}]${BEGIN}
111 [Guids.${module_arch}]${BEGIN}
115 [PcdEx.${module_arch}]${BEGIN}
119 [LibraryClasses.${module_arch}]
120 ## @LIB_INSTANCES${BEGIN}
121 # ${libraryclasses_item}${END}
125 ${userextension_tianocore_item}
129 [BuildOptions.${module_arch}]
131 ## ${flags_item}${END}
# Extend lists contained in a dictionary with lists stored in another dictionary.
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    """Extend each list in CopyToDict with the matching list from CopyFromDict.

    Uses setdefault so a plain dict target works too: a key missing from
    CopyToDict is created with an empty list instead of raising KeyError.
    For defaultdict(list)-style mappings the behavior is unchanged.
    """
    for Key in CopyFromDict:
        CopyToDict.setdefault(Key, []).extend(CopyFromDict[Key])
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    """Join the path elements, ensure the directory exists, and return the path.

    The return statement fulfills the contract stated in the header comment
    ("...and return the full path"); callers use the joined path directly.
    """
    RetVal = path.join(*PathList)
    CreateDirectory(RetVal)
    return RetVal
148 # Convert string to C format array
150 def _ConvertStringToByteArray(Value
):
151 Value
= Value
.strip()
155 if not Value
.endswith('}'):
157 Value
= Value
.replace(' ', '').replace('{', '').replace('}', '')
158 ValFields
= Value
.split(',')
160 for Index
in range(len(ValFields
)):
161 ValFields
[Index
] = str(int(ValFields
[Index
], 0))
164 Value
= '{' + ','.join(ValFields
) + '}'
168 if Value
.startswith('L"'):
169 if not Value
.endswith('"'):
173 elif not Value
.startswith('"') or not Value
.endswith('"'):
176 Value
= eval(Value
) # translate escape character
178 for Index
in range(0, len(Value
)):
180 NewValue
= NewValue
+ str(ord(Value
[Index
]) % 0x10000) + ','
182 NewValue
= NewValue
+ str(ord(Value
[Index
]) % 0x100) + ','
183 Value
= NewValue
+ '0}'
186 ## ModuleAutoGen class
188 # This class encapsules the AutoGen behaviors for the build tools. In addition to
189 # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
190 # to the [depex] section in module's inf file.
192 class ModuleAutoGen(AutoGen
):
193 # call super().__init__ then call the worker function with different parameter count
194 def __init__(self
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
):
195 if not hasattr(self
, "_Init"):
196 self
._InitWorker
(Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
)
199 ## Cache the timestamps of metafiles of every module in a class attribute
203 def __new__(cls
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
):
204 # check if this module is employed by active platform
205 if not PlatformInfo(Workspace
, args
[0], Target
, Toolchain
, Arch
,args
[-1]).ValidModule(MetaFile
):
206 EdkLogger
.verbose("Module [%s] for [%s] is not employed by active platform\n" \
209 return super(ModuleAutoGen
, cls
).__new
__(cls
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
)
211 ## Initialize ModuleAutoGen
213 # @param Workspace EdkIIWorkspaceBuild object
214 # @param ModuleFile The path of module file
215 # @param Target Build target (DEBUG, RELEASE)
216 # @param Toolchain Name of tool chain
217 # @param Arch The arch the module supports
218 # @param PlatformFile Platform meta-file
220 def _InitWorker(self
, Workspace
, ModuleFile
, Target
, Toolchain
, Arch
, PlatformFile
,DataPipe
):
221 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "AutoGen module [%s] [%s]" % (ModuleFile
, Arch
))
222 GlobalData
.gProcessingFile
= "%s [%s, %s, %s]" % (ModuleFile
, Arch
, Toolchain
, Target
)
224 self
.Workspace
= Workspace
225 self
.WorkspaceDir
= ""
226 self
.PlatformInfo
= None
227 self
.DataPipe
= DataPipe
228 self
.__init
_platform
_info
__()
229 self
.MetaFile
= ModuleFile
230 self
.SourceDir
= self
.MetaFile
.SubDir
231 self
.SourceDir
= mws
.relpath(self
.SourceDir
, self
.WorkspaceDir
)
233 self
.ToolChain
= Toolchain
234 self
.BuildTarget
= Target
236 self
.ToolChainFamily
= self
.PlatformInfo
.ToolChainFamily
237 self
.BuildRuleFamily
= self
.PlatformInfo
.BuildRuleFamily
239 self
.IsCodeFileCreated
= False
240 self
.IsAsBuiltInfCreated
= False
241 self
.DepexGenerated
= False
243 self
.BuildDatabase
= self
.Workspace
.BuildDatabase
244 self
.BuildRuleOrder
= None
247 self
._GuidComments
= OrderedListDict()
248 self
._ProtocolComments
= OrderedListDict()
249 self
._PpiComments
= OrderedListDict()
250 self
._BuildTargets
= None
251 self
._IntroBuildTargetList
= None
252 self
._FinalBuildTargetList
= None
253 self
._FileTypes
= None
255 self
.AutoGenDepSet
= set()
256 self
.ReferenceModules
= []
259 self
.FileDependCache
= {}
261 def __init_platform_info__(self
):
262 pinfo
= self
.DataPipe
.Get("P_Info")
263 self
.WorkspaceDir
= pinfo
.get("WorkspaceDir")
264 self
.PlatformInfo
= PlatformInfo(self
.Workspace
,pinfo
.get("ActivePlatform"),pinfo
.get("Target"),pinfo
.get("ToolChain"),pinfo
.get("Arch"),self
.DataPipe
)
265 ## hash() operator of ModuleAutoGen
267 # The module file path and arch string will be used to represent
268 # hash value of this object
270 # @retval int Hash value of the module file path and arch
272 @cached_class_function
274 return hash((self
.MetaFile
, self
.Arch
))
276 return "%s [%s]" % (self
.MetaFile
, self
.Arch
)
278 # Get FixedAtBuild Pcds of this Module
280 def FixedAtBuildPcds(self
):
282 for Pcd
in self
.ModulePcdList
:
283 if Pcd
.Type
!= TAB_PCDS_FIXED_AT_BUILD
:
285 if Pcd
not in RetVal
:
290 def FixedVoidTypePcds(self
):
292 for Pcd
in self
.FixedAtBuildPcds
:
293 if Pcd
.DatumType
== TAB_VOID
:
294 if '.'.join((Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
)) not in RetVal
:
295 RetVal
['.'.join((Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
))] = Pcd
.DefaultValue
299 def UniqueBaseName(self
):
300 ModuleNames
= self
.DataPipe
.Get("M_Name")
303 return ModuleNames
.get((self
.Name
,self
.MetaFile
),self
.Name
)
305 # Macros could be used in build_rule.txt (also Makefile)
309 ("WORKSPACE" ,self
.WorkspaceDir
),
310 ("MODULE_NAME" ,self
.Name
),
311 ("MODULE_NAME_GUID" ,self
.UniqueBaseName
),
312 ("MODULE_GUID" ,self
.Guid
),
313 ("MODULE_VERSION" ,self
.Version
),
314 ("MODULE_TYPE" ,self
.ModuleType
),
315 ("MODULE_FILE" ,str(self
.MetaFile
)),
316 ("MODULE_FILE_BASE_NAME" ,self
.MetaFile
.BaseName
),
317 ("MODULE_RELATIVE_DIR" ,self
.SourceDir
),
318 ("MODULE_DIR" ,self
.SourceDir
),
319 ("BASE_NAME" ,self
.Name
),
321 ("TOOLCHAIN" ,self
.ToolChain
),
322 ("TOOLCHAIN_TAG" ,self
.ToolChain
),
323 ("TOOL_CHAIN_TAG" ,self
.ToolChain
),
324 ("TARGET" ,self
.BuildTarget
),
325 ("BUILD_DIR" ,self
.PlatformInfo
.BuildDir
),
326 ("BIN_DIR" ,os
.path
.join(self
.PlatformInfo
.BuildDir
, self
.Arch
)),
327 ("LIB_DIR" ,os
.path
.join(self
.PlatformInfo
.BuildDir
, self
.Arch
)),
328 ("MODULE_BUILD_DIR" ,self
.BuildDir
),
329 ("OUTPUT_DIR" ,self
.OutputDir
),
330 ("DEBUG_DIR" ,self
.DebugDir
),
331 ("DEST_DIR_OUTPUT" ,self
.OutputDir
),
332 ("DEST_DIR_DEBUG" ,self
.DebugDir
),
333 ("PLATFORM_NAME" ,self
.PlatformInfo
.Name
),
334 ("PLATFORM_GUID" ,self
.PlatformInfo
.Guid
),
335 ("PLATFORM_VERSION" ,self
.PlatformInfo
.Version
),
336 ("PLATFORM_RELATIVE_DIR" ,self
.PlatformInfo
.SourceDir
),
337 ("PLATFORM_DIR" ,mws
.join(self
.WorkspaceDir
, self
.PlatformInfo
.SourceDir
)),
338 ("PLATFORM_OUTPUT_DIR" ,self
.PlatformInfo
.OutputDir
),
339 ("FFS_OUTPUT_DIR" ,self
.FfsOutputDir
)
342 ## Return the module build data object
345 return self
.BuildDatabase
[self
.MetaFile
, self
.Arch
, self
.BuildTarget
, self
.ToolChain
]
347 ## Return the module name
350 return self
.Module
.BaseName
352 ## Return the module DxsFile if exist
355 return self
.Module
.DxsFile
357 ## Return the module meta-file GUID
361 # To build same module more than once, the module path with FILE_GUID overridden has
362 # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
363 # in DSC. The overridden GUID can be retrieved from file name
365 if os
.path
.basename(self
.MetaFile
.File
) != os
.path
.basename(self
.MetaFile
.Path
):
367 # Length of GUID is 36
369 return os
.path
.basename(self
.MetaFile
.Path
)[:36]
370 return self
.Module
.Guid
372 ## Return the module version
375 return self
.Module
.Version
    ## Return the module type
    def ModuleType(self):
        """Return the MODULE_TYPE value of the module's build data object."""
        return self.Module.ModuleType
    ## Return the component type (for Edk.x style of module)
    def ComponentType(self):
        """Return the COMPONENT_TYPE value (Edk.x-style modules) of the module's build data object."""
        return self.Module.ComponentType
387 ## Return the build type
390 return self
.Module
.BuildType
    ## Return the PCD_IS_DRIVER setting
    def PcdIsDriver(self):
        """Return the PCD_IS_DRIVER setting of the module's build data object."""
        return self.Module.PcdIsDriver
397 ## Return the autogen version, i.e. module meta-file version
399 def AutoGenVersion(self
):
400 return self
.Module
.AutoGenVersion
402 ## Check if the module is library or not
405 return bool(self
.Module
.LibraryClass
)
407 ## Check if the module is binary module or not
409 def IsBinaryModule(self
):
410 return self
.Module
.IsBinaryModule
412 ## Return the directory to store intermediate files of the module
416 self
.PlatformInfo
.BuildDir
,
419 self
.MetaFile
.BaseName
422 ## Return the directory to store the intermediate object files of the module
425 return _MakeDir((self
.BuildDir
, "OUTPUT"))
427 ## Return the directory path to store ffs file
429 def FfsOutputDir(self
):
430 if GlobalData
.gFdfParser
:
431 return path
.join(self
.PlatformInfo
.BuildDir
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
)
434 ## Return the directory to store auto-gened source files of the module
437 return _MakeDir((self
.BuildDir
, "DEBUG"))
439 ## Return the path of custom file
441 def CustomMakefile(self
):
443 for Type
in self
.Module
.CustomMakefile
:
444 MakeType
= gMakeTypeMap
[Type
] if Type
in gMakeTypeMap
else 'nmake'
445 File
= os
.path
.join(self
.SourceDir
, self
.Module
.CustomMakefile
[Type
])
446 RetVal
[MakeType
] = File
449 ## Return the directory of the makefile
451 # @retval string The directory string of module's makefile
454 def MakeFileDir(self
):
    ## Return build command string
    #
    #   @retval     string  Build command string
    def BuildCommand(self):
        """Return the build command string provided by the platform info object."""
        return self.PlatformInfo.BuildCommand
465 ## Get object list of all packages the module and its dependent libraries belong to
467 # @retval list The list of package object
470 def DerivedPackageList(self
):
472 for M
in [self
.Module
] + self
.DependentLibraryList
:
473 for Package
in M
.Packages
:
474 if Package
in PackageList
:
476 PackageList
.append(Package
)
479 ## Get the depex string
481 # @return : a string contain all depex expression.
482 def _GetDepexExpresionString(self
):
485 ## DPX_SOURCE IN Define section.
486 if self
.Module
.DxsFile
:
488 for M
in [self
.Module
] + self
.DependentLibraryList
:
489 Filename
= M
.MetaFile
.Path
490 InfObj
= InfSectionParser
.InfSectionParser(Filename
)
491 DepexExpressionList
= InfObj
.GetDepexExpresionList()
492 for DepexExpression
in DepexExpressionList
:
493 for key
in DepexExpression
:
494 Arch
, ModuleType
= key
495 DepexExpr
= [x
for x
in DepexExpression
[key
] if not str(x
).startswith('#')]
496 # the type of build module is USER_DEFINED.
497 # All different DEPEX section tags would be copied into the As Built INF file
498 # and there would be separate DEPEX section tags
499 if self
.ModuleType
.upper() == SUP_MODULE_USER_DEFINED
or self
.ModuleType
.upper() == SUP_MODULE_HOST_APPLICATION
:
500 if (Arch
.upper() == self
.Arch
.upper()) and (ModuleType
.upper() != TAB_ARCH_COMMON
):
501 DepexList
.append({(Arch
, ModuleType
): DepexExpr
})
503 if Arch
.upper() == TAB_ARCH_COMMON
or \
504 (Arch
.upper() == self
.Arch
.upper() and \
505 ModuleType
.upper() in [TAB_ARCH_COMMON
, self
.ModuleType
.upper()]):
506 DepexList
.append({(Arch
, ModuleType
): DepexExpr
})
508 #the type of build module is USER_DEFINED.
509 if self
.ModuleType
.upper() == SUP_MODULE_USER_DEFINED
or self
.ModuleType
.upper() == SUP_MODULE_HOST_APPLICATION
:
510 for Depex
in DepexList
:
512 DepexStr
+= '[Depex.%s.%s]\n' % key
513 DepexStr
+= '\n'.join('# '+ val
for val
in Depex
[key
])
516 return '[Depex.%s]\n' % self
.Arch
# the type of build module is not USER_DEFINED.
521 for Depex
in DepexList
:
526 for D
in Depex
.values():
527 DepexStr
+= ' '.join(val
for val
in D
)
528 Index
= DepexStr
.find('END')
529 if Index
> -1 and Index
== len(DepexStr
) - 3:
530 DepexStr
= DepexStr
[:-3]
531 DepexStr
= DepexStr
.strip()
534 DepexStr
= DepexStr
.lstrip('(').rstrip(')').strip()
536 return '[Depex.%s]\n' % self
.Arch
537 return '[Depex.%s]\n# ' % self
.Arch
+ DepexStr
539 ## Merge dependency expression
541 # @retval list The token list of the dependency expression after parsed
545 if self
.DxsFile
or self
.IsLibrary
or TAB_DEPENDENCY_EXPRESSION_FILE
in self
.FileTypes
:
550 # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
552 FixedVoidTypePcds
= {}
553 for M
in [self
] + self
.LibraryAutoGenList
:
554 FixedVoidTypePcds
.update(M
.FixedVoidTypePcds
)
555 for M
in [self
] + self
.LibraryAutoGenList
:
557 for D
in M
.Module
.Depex
[self
.Arch
, self
.ModuleType
]:
559 DepexList
.append('AND')
560 DepexList
.append('(')
561 #replace D with value if D is FixedAtBuild PCD
568 Value
= FixedVoidTypePcds
[item
]
569 if len(Value
.split(',')) != 16:
570 EdkLogger
.error("build", FORMAT_INVALID
,
571 "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item
))
572 NewList
.append(Value
)
574 EdkLogger
.error("build", FORMAT_INVALID
, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item
))
576 DepexList
.extend(NewList
)
577 if DepexList
[-1] == 'END': # no need of a END at this time
579 DepexList
.append(')')
582 EdkLogger
.verbose("DEPEX[%s] (+%s) = %s" % (self
.Name
, M
.Module
.BaseName
, DepexList
))
583 if 'BEFORE' in DepexList
or 'AFTER' in DepexList
:
585 if len(DepexList
) > 0:
586 EdkLogger
.verbose('')
587 return {self
.ModuleType
:DepexList
}
589 ## Merge dependency expression
591 # @retval list The token list of the dependency expression after parsed
594 def DepexExpressionDict(self
):
595 if self
.DxsFile
or self
.IsLibrary
or TAB_DEPENDENCY_EXPRESSION_FILE
in self
.FileTypes
:
598 DepexExpressionString
= ''
# Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
602 for M
in [self
.Module
] + self
.DependentLibraryList
:
604 for D
in M
.DepexExpression
[self
.Arch
, self
.ModuleType
]:
605 if DepexExpressionString
!= '':
606 DepexExpressionString
+= ' AND '
607 DepexExpressionString
+= '('
608 DepexExpressionString
+= D
609 DepexExpressionString
= DepexExpressionString
.rstrip('END').strip()
610 DepexExpressionString
+= ')'
613 EdkLogger
.verbose("DEPEX[%s] (+%s) = %s" % (self
.Name
, M
.BaseName
, DepexExpressionString
))
614 if 'BEFORE' in DepexExpressionString
or 'AFTER' in DepexExpressionString
:
616 if len(DepexExpressionString
) > 0:
617 EdkLogger
.verbose('')
619 return {self
.ModuleType
:DepexExpressionString
}
621 # Get the tiano core user extension, it is contain dependent library.
622 # @retval: a list contain tiano core userextension.
624 def _GetTianoCoreUserExtensionList(self
):
625 TianoCoreUserExtentionList
= []
626 for M
in [self
.Module
] + self
.DependentLibraryList
:
627 Filename
= M
.MetaFile
.Path
628 InfObj
= InfSectionParser
.InfSectionParser(Filename
)
629 TianoCoreUserExtenList
= InfObj
.GetUserExtensionTianoCore()
630 for TianoCoreUserExtent
in TianoCoreUserExtenList
:
631 for Section
in TianoCoreUserExtent
:
632 ItemList
= Section
.split(TAB_SPLIT
)
634 if len(ItemList
) == 4:
636 if Arch
.upper() == TAB_ARCH_COMMON
or Arch
.upper() == self
.Arch
.upper():
638 TianoCoreList
.extend([TAB_SECTION_START
+ Section
+ TAB_SECTION_END
])
639 TianoCoreList
.extend(TianoCoreUserExtent
[Section
][:])
640 TianoCoreList
.append('\n')
641 TianoCoreUserExtentionList
.append(TianoCoreList
)
643 return TianoCoreUserExtentionList
    ## Return the list of specification version required for the module
    #
    #   @retval     list    The list of specification defined in module file
    def Specification(self):
        """Return the Specification mapping of the module's build data object."""
        return self.Module.Specification
653 ## Tool option for the module build
655 # @param PlatformInfo The object of PlatformBuildInfo
656 # @retval dict The dict containing valid options
659 def BuildOption(self
):
660 RetVal
, self
.BuildRuleOrder
= self
.PlatformInfo
.ApplyBuildOption(self
.Module
)
661 if self
.BuildRuleOrder
:
662 self
.BuildRuleOrder
= ['.%s' % Ext
for Ext
in self
.BuildRuleOrder
.split()]
665 ## Get include path list from tool option for the module build
667 # @retval list The include path list
670 def BuildOptionIncPathList(self
):
672 # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
673 # is the former use /I , the Latter used -I to specify include directories
675 if self
.PlatformInfo
.ToolChainFamily
in (TAB_COMPILER_MSFT
):
676 BuildOptIncludeRegEx
= gBuildOptIncludePatternMsft
677 elif self
.PlatformInfo
.ToolChainFamily
in ('INTEL', 'GCC', 'RVCT'):
678 BuildOptIncludeRegEx
= gBuildOptIncludePatternOther
# New ToolChainFamily; it is not known whether there is an option to specify include directories
686 for Tool
in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
688 FlagOption
= self
.BuildOption
[Tool
]['FLAGS']
692 if self
.ToolChainFamily
!= 'RVCT':
693 IncPathList
= [NormPath(Path
, self
.Macros
) for Path
in BuildOptIncludeRegEx
.findall(FlagOption
)]
# RVCT may specify a list of directories separated by commas
699 for Path
in BuildOptIncludeRegEx
.findall(FlagOption
):
700 PathList
= GetSplitList(Path
, TAB_COMMA_SPLIT
)
701 IncPathList
.extend(NormPath(PathEntry
, self
.Macros
) for PathEntry
in PathList
)
704 # EDK II modules must not reference header files outside of the packages they depend on or
705 # within the module's directory tree. Report error if violation.
707 if GlobalData
.gDisableIncludePathCheck
== False:
708 for Path
in IncPathList
:
709 if (Path
not in self
.IncludePathList
) and (CommonPath([Path
, self
.MetaFile
.Dir
]) != self
.MetaFile
.Dir
):
710 ErrMsg
= "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path
, Tool
, FlagOption
)
711 EdkLogger
.error("build",
714 File
=str(self
.MetaFile
))
715 RetVal
+= IncPathList
718 ## Return a list of files which can be built from source
720 # What kind of files can be built is determined by build rules in
721 # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
724 def SourceFileList(self
):
726 ToolChainTagSet
= {"", TAB_STAR
, self
.ToolChain
}
727 ToolChainFamilySet
= {"", TAB_STAR
, self
.ToolChainFamily
, self
.BuildRuleFamily
}
728 for F
in self
.Module
.Sources
:
730 if F
.TagName
not in ToolChainTagSet
:
731 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "The toolchain [%s] for processing file [%s] is found, "
732 "but [%s] is currently used" % (F
.TagName
, str(F
), self
.ToolChain
))
734 # match tool chain family or build rule family
735 if F
.ToolChainFamily
not in ToolChainFamilySet
:
738 "The file [%s] must be built by tools of [%s], " \
739 "but current toolchain family is [%s], buildrule family is [%s]" \
740 % (str(F
), F
.ToolChainFamily
, self
.ToolChainFamily
, self
.BuildRuleFamily
))
743 # add the file path into search path list for file including
744 if F
.Dir
not in self
.IncludePathList
:
745 self
.IncludePathList
.insert(0, F
.Dir
)
748 self
._MatchBuildRuleOrder
(RetVal
)
751 self
._ApplyBuildRule
(F
, TAB_UNKNOWN_FILE
)
754 def _MatchBuildRuleOrder(self
, FileList
):
757 for SingleFile
in FileList
:
758 if self
.BuildRuleOrder
and SingleFile
.Ext
in self
.BuildRuleOrder
and SingleFile
.Ext
in self
.BuildRules
:
759 key
= SingleFile
.Path
.rsplit(SingleFile
.Ext
,1)[0]
760 if key
in Order_Dict
:
761 Order_Dict
[key
].append(SingleFile
.Ext
)
763 Order_Dict
[key
] = [SingleFile
.Ext
]
767 if len(Order_Dict
[F
]) > 1:
768 Order_Dict
[F
].sort(key
=lambda i
: self
.BuildRuleOrder
.index(i
))
769 for Ext
in Order_Dict
[F
][1:]:
770 RemoveList
.append(F
+ Ext
)
772 for item
in RemoveList
:
773 FileList
.remove(item
)
    ## Return the list of unicode files
    def UnicodeFileList(self):
        """Return source files classified as unicode files, or [] if none were collected."""
        return self.FileTypes.get(TAB_UNICODE_FILE,[])
    ## Return the list of vfr files
    def VfrFileList(self):
        """Return source files classified as VFR files, or [] if none were collected."""
        return self.FileTypes.get(TAB_VFR_FILE, [])
    ## Return the list of Image Definition files
    def IdfFileList(self):
        """Return source files classified as image (IDF) files, or [] if none were collected."""
        return self.FileTypes.get(TAB_IMAGE_FILE,[])
792 ## Return a list of files which can be built from binary
794 # "Build" binary files are just to copy them to build directory.
796 # @retval list The list of files which can be built later
799 def BinaryFileList(self
):
801 for F
in self
.Module
.Binaries
:
802 if F
.Target
not in [TAB_ARCH_COMMON
, TAB_STAR
] and F
.Target
!= self
.BuildTarget
:
805 self
._ApplyBuildRule
(F
, F
.Type
, BinaryFileList
=RetVal
)
809 def BuildRules(self
):
811 BuildRuleDatabase
= self
.PlatformInfo
.BuildRule
812 for Type
in BuildRuleDatabase
.FileTypeList
:
813 #first try getting build rule by BuildRuleFamily
814 RuleObject
= BuildRuleDatabase
[Type
, self
.BuildType
, self
.Arch
, self
.BuildRuleFamily
]
816 # build type is always module type, but ...
817 if self
.ModuleType
!= self
.BuildType
:
818 RuleObject
= BuildRuleDatabase
[Type
, self
.ModuleType
, self
.Arch
, self
.BuildRuleFamily
]
819 #second try getting build rule by ToolChainFamily
821 RuleObject
= BuildRuleDatabase
[Type
, self
.BuildType
, self
.Arch
, self
.ToolChainFamily
]
823 # build type is always module type, but ...
824 if self
.ModuleType
!= self
.BuildType
:
825 RuleObject
= BuildRuleDatabase
[Type
, self
.ModuleType
, self
.Arch
, self
.ToolChainFamily
]
828 RuleObject
= RuleObject
.Instantiate(self
.Macros
)
829 RetVal
[Type
] = RuleObject
830 for Ext
in RuleObject
.SourceFileExtList
:
831 RetVal
[Ext
] = RuleObject
834 def _ApplyBuildRule(self
, File
, FileType
, BinaryFileList
=None):
835 if self
._BuildTargets
is None:
836 self
._IntroBuildTargetList
= set()
837 self
._FinalBuildTargetList
= set()
838 self
._BuildTargets
= defaultdict(set)
839 self
._FileTypes
= defaultdict(set)
841 if not BinaryFileList
:
842 BinaryFileList
= self
.BinaryFileList
844 SubDirectory
= os
.path
.join(self
.OutputDir
, File
.SubDir
)
845 if not os
.path
.exists(SubDirectory
):
846 CreateDirectory(SubDirectory
)
852 # Make sure to get build rule order value
856 while Index
< len(SourceList
):
857 Source
= SourceList
[Index
]
861 CreateDirectory(Source
.Dir
)
863 if File
.IsBinary
and File
== Source
and File
in BinaryFileList
:
864 # Skip all files that are not binary libraries
865 if not self
.IsLibrary
:
867 RuleObject
= self
.BuildRules
[TAB_DEFAULT_BINARY_FILE
]
868 elif FileType
in self
.BuildRules
:
869 RuleObject
= self
.BuildRules
[FileType
]
870 elif Source
.Ext
in self
.BuildRules
:
871 RuleObject
= self
.BuildRules
[Source
.Ext
]
873 # stop at no more rules
875 self
._FinalBuildTargetList
.add(LastTarget
)
878 FileType
= RuleObject
.SourceFileType
879 self
._FileTypes
[FileType
].add(Source
)
881 # stop at STATIC_LIBRARY for library
882 if self
.IsLibrary
and FileType
== TAB_STATIC_LIBRARY
:
884 self
._FinalBuildTargetList
.add(LastTarget
)
887 Target
= RuleObject
.Apply(Source
, self
.BuildRuleOrder
)
890 self
._FinalBuildTargetList
.add(LastTarget
)
892 elif not Target
.Outputs
:
893 # Only do build for target with outputs
894 self
._FinalBuildTargetList
.add(Target
)
896 self
._BuildTargets
[FileType
].add(Target
)
898 if not Source
.IsBinary
and Source
== File
:
899 self
._IntroBuildTargetList
.add(Target
)
901 # to avoid cyclic rule
902 if FileType
in RuleChain
:
905 RuleChain
.add(FileType
)
906 SourceList
.extend(Target
.Outputs
)
908 FileType
= TAB_UNKNOWN_FILE
912 if self
._BuildTargets
is None:
913 self
._IntroBuildTargetList
= set()
914 self
._FinalBuildTargetList
= set()
915 self
._BuildTargets
= defaultdict(set)
916 self
._FileTypes
= defaultdict(set)
918 #TRICK: call SourceFileList property to apply build rule for source files
921 #TRICK: call _GetBinaryFileList to apply build rule for binary files
924 return self
._BuildTargets
927 def IntroTargetList(self
):
929 return self
._IntroBuildTargetList
932 def CodaTargetList(self
):
934 return self
._FinalBuildTargetList
939 return self
._FileTypes
    ## Get the list of package object the module depends on
    #
    #   @retval     list    The package object list
    def DependentPackageList(self):
        """Return the Packages list declared by the module's build data object."""
        return self.Module.Packages
949 ## Return the list of auto-generated code file
951 # @retval list The list of auto-generated file
954 def AutoGenFileList(self
):
955 AutoGenUniIdf
= self
.BuildType
!= 'UEFI_HII'
956 UniStringBinBuffer
= BytesIO()
957 IdfGenBinBuffer
= BytesIO()
959 AutoGenC
= TemplateString()
960 AutoGenH
= TemplateString()
961 StringH
= TemplateString()
962 StringIdf
= TemplateString()
963 GenC
.CreateCode(self
, AutoGenC
, AutoGenH
, StringH
, AutoGenUniIdf
, UniStringBinBuffer
, StringIdf
, AutoGenUniIdf
, IdfGenBinBuffer
)
965 # AutoGen.c is generated if there are library classes in inf, or there are object files
967 if str(AutoGenC
) != "" and (len(self
.Module
.LibraryClasses
) > 0
968 or TAB_OBJECT_FILE
in self
.FileTypes
):
969 AutoFile
= PathClass(gAutoGenCodeFileName
, self
.DebugDir
)
970 RetVal
[AutoFile
] = str(AutoGenC
)
971 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
972 if str(AutoGenH
) != "":
973 AutoFile
= PathClass(gAutoGenHeaderFileName
, self
.DebugDir
)
974 RetVal
[AutoFile
] = str(AutoGenH
)
975 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
976 if str(StringH
) != "":
977 AutoFile
= PathClass(gAutoGenStringFileName
% {"module_name":self
.Name
}, self
.DebugDir
)
978 RetVal
[AutoFile
] = str(StringH
)
979 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
980 if UniStringBinBuffer
is not None and UniStringBinBuffer
.getvalue() != b
"":
981 AutoFile
= PathClass(gAutoGenStringFormFileName
% {"module_name":self
.Name
}, self
.OutputDir
)
982 RetVal
[AutoFile
] = UniStringBinBuffer
.getvalue()
983 AutoFile
.IsBinary
= True
984 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
985 if UniStringBinBuffer
is not None:
986 UniStringBinBuffer
.close()
987 if str(StringIdf
) != "":
988 AutoFile
= PathClass(gAutoGenImageDefFileName
% {"module_name":self
.Name
}, self
.DebugDir
)
989 RetVal
[AutoFile
] = str(StringIdf
)
990 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
991 if IdfGenBinBuffer
is not None and IdfGenBinBuffer
.getvalue() != b
"":
992 AutoFile
= PathClass(gAutoGenIdfFileName
% {"module_name":self
.Name
}, self
.OutputDir
)
993 RetVal
[AutoFile
] = IdfGenBinBuffer
.getvalue()
994 AutoFile
.IsBinary
= True
995 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
996 if IdfGenBinBuffer
is not None:
997 IdfGenBinBuffer
.close()
1000 ## Return the list of library modules explicitly or implicitly used by this module
1002 def DependentLibraryList(self
):
1003 # only merge library classes and PCD for non-library module
1006 return self
.PlatformInfo
.ApplyLibraryInstance(self
.Module
)
1008 ## Get the list of PCDs from current module
1010 # @retval list The list of PCD
1013 def ModulePcdList(self
):
1014 # apply PCD settings from platform
1015 RetVal
= self
.PlatformInfo
.ApplyPcdSetting(self
.Module
, self
.Module
.Pcds
)
1019 def _PcdComments(self
):
1020 ReVal
= OrderedListDict()
1021 ExtendCopyDictionaryLists(ReVal
, self
.Module
.PcdComments
)
1022 if not self
.IsLibrary
:
1023 for Library
in self
.DependentLibraryList
:
1024 ExtendCopyDictionaryLists(ReVal
, Library
.PcdComments
)
1027 ## Get the list of PCDs from dependent libraries
1029 # @retval list The list of PCD
1032 def LibraryPcdList(self
):
1037 # get PCDs from dependent libraries
1038 for Library
in self
.DependentLibraryList
:
1039 PcdsInLibrary
= OrderedDict()
1040 for Key
in Library
.Pcds
:
1041 # skip duplicated PCDs
1042 if Key
in self
.Module
.Pcds
or Key
in Pcds
:
1045 PcdsInLibrary
[Key
] = copy
.copy(Library
.Pcds
[Key
])
1046 RetVal
.extend(self
.PlatformInfo
.ApplyPcdSetting(self
.Module
, PcdsInLibrary
, Library
=Library
))
1049 ## Get the GUID value mapping
1051 # @retval dict The mapping between GUID cname and its value
1055 RetVal
= self
.Module
.Guids
1056 for Library
in self
.DependentLibraryList
:
1057 RetVal
.update(Library
.Guids
)
1058 ExtendCopyDictionaryLists(self
._GuidComments
, Library
.GuidComments
)
1059 ExtendCopyDictionaryLists(self
._GuidComments
, self
.Module
.GuidComments
)
def GetGuidsUsedByPcd(self):
    """Return an ordered mapping of GUID cname -> value for GUIDs referenced
    by PCDs of this module and of every dependent library.

    Library entries are merged after the module's own, so a library's value
    wins on a duplicate cname.
    """
    Result = OrderedDict(self.Module.GetGuidsUsedByPcd())
    for Lib in self.DependentLibraryList:
        Result.update(Lib.GetGuidsUsedByPcd())
    return Result
def ProtocolList(self):
    """Return the mapping of protocol cname -> value used by this module.

    Protocols of all dependent libraries are merged in, and as a side effect
    their protocol usage comments (plus the module's own) are accumulated
    into self._ProtocolComments.
    """
    # NOTE(review): upstream this is likely decorated as a cached property —
    # confirm against the original file.
    Protocols = OrderedDict(self.Module.Protocols)
    for Lib in self.DependentLibraryList:
        Protocols.update(Lib.Protocols)
        ExtendCopyDictionaryLists(self._ProtocolComments, Lib.ProtocolComments)
    ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)
    return Protocols
1081 ## Get the PPI value mapping
1083 # @retval dict The mapping between PPI cname and its value
1087 RetVal
= OrderedDict(self
.Module
.Ppis
)
1088 for Library
in self
.DependentLibraryList
:
1089 RetVal
.update(Library
.Ppis
)
1090 ExtendCopyDictionaryLists(self
._PpiComments
, Library
.PpiComments
)
1091 ExtendCopyDictionaryLists(self
._PpiComments
, self
.Module
.PpiComments
)
1094 ## Get the list of include search path
1096 # @retval list The list path
1099 def IncludePathList(self
):
1101 RetVal
.append(self
.MetaFile
.Dir
)
1102 RetVal
.append(self
.DebugDir
)
1104 for Package
in self
.Module
.Packages
:
1105 PackageDir
= mws
.join(self
.WorkspaceDir
, Package
.MetaFile
.Dir
)
1106 if PackageDir
not in RetVal
:
1107 RetVal
.append(PackageDir
)
1108 IncludesList
= Package
.Includes
1109 if Package
._PrivateIncludes
:
1110 if not self
.MetaFile
.OriginalPath
.Path
.startswith(PackageDir
):
1111 IncludesList
= list(set(Package
.Includes
).difference(set(Package
._PrivateIncludes
)))
1112 for Inc
in IncludesList
:
1113 if Inc
not in RetVal
:
1114 RetVal
.append(str(Inc
))
def IncludePathLength(self):
    """Return the total character length of all include paths, counting one
    extra character per path (separator/terminator)."""
    total = 0
    for SinglePath in self.IncludePathList:
        total += len(SinglePath) + 1
    return total
1121 ## Get the list of include paths from the packages
1123 # @IncludesList list The list path
1126 def PackageIncludePathList(self
):
1128 for Package
in self
.Module
.Packages
:
1129 PackageDir
= mws
.join(self
.WorkspaceDir
, Package
.MetaFile
.Dir
)
1130 IncludesList
= Package
.Includes
1131 if Package
._PrivateIncludes
:
1132 if not self
.MetaFile
.Path
.startswith(PackageDir
):
1133 IncludesList
= list(set(Package
.Includes
).difference(set(Package
._PrivateIncludes
)))
1136 ## Get HII EX PCDs which maybe used by VFR
1138 # efivarstore used by VFR may relate with HII EX PCDs
1139 # Get the variable name and GUID from efivarstore and HII EX PCD
1140 # List the HII EX PCDs in As Built INF if both name and GUID match.
1142 # @retval list HII EX PCDs
1144 def _GetPcdsMaybeUsedByVfr(self
):
1145 if not self
.SourceFileList
:
1149 for SrcFile
in self
.SourceFileList
:
1150 if SrcFile
.Ext
.lower() != '.vfr':
1152 Vfri
= os
.path
.join(self
.OutputDir
, SrcFile
.BaseName
+ '.i')
1153 if not os
.path
.exists(Vfri
):
1155 VfriFile
= open(Vfri
, 'r')
1156 Content
= VfriFile
.read()
1158 Pos
= Content
.find('efivarstore')
1161 # Make sure 'efivarstore' is the start of efivarstore statement
1162 # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
1165 while Index
>= 0 and Content
[Index
] in ' \t\r\n':
1167 if Index
>= 0 and Content
[Index
] != ';':
1168 Pos
= Content
.find('efivarstore', Pos
+ len('efivarstore'))
1171 # 'efivarstore' must be followed by name and guid
1173 Name
= gEfiVarStoreNamePattern
.search(Content
, Pos
)
1176 Guid
= gEfiVarStoreGuidPattern
.search(Content
, Pos
)
1179 NameArray
= _ConvertStringToByteArray('L"' + Name
.group(1) + '"')
1180 NameGuids
.add((NameArray
, GuidStructureStringToGuidString(Guid
.group(1))))
1181 Pos
= Content
.find('efivarstore', Name
.end())
1185 for Pcd
in self
.PlatformInfo
.Pcds
.values():
1186 if Pcd
.Type
!= TAB_PCDS_DYNAMIC_EX_HII
:
1188 for SkuInfo
in Pcd
.SkuInfoList
.values():
1189 Value
= GuidValue(SkuInfo
.VariableGuid
, self
.PlatformInfo
.PackageList
, self
.MetaFile
.Path
)
1192 Name
= _ConvertStringToByteArray(SkuInfo
.VariableName
)
1193 Guid
= GuidStructureStringToGuidString(Value
)
1194 if (Name
, Guid
) in NameGuids
and Pcd
not in HiiExPcds
:
1195 HiiExPcds
.append(Pcd
)
1200 def _GenOffsetBin(self
):
1202 for SourceFile
in self
.Module
.Sources
:
1203 if SourceFile
.Type
.upper() == ".VFR" :
1205 # search the .map file to find the offset of vfr binary in the PE32+/TE file.
1207 VfrUniBaseName
[SourceFile
.BaseName
] = (SourceFile
.BaseName
+ "Bin")
1208 elif SourceFile
.Type
.upper() == ".UNI" :
1210 # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
1212 VfrUniBaseName
["UniOffsetName"] = (self
.Name
+ "Strings")
1214 if not VfrUniBaseName
:
1216 MapFileName
= os
.path
.join(self
.OutputDir
, self
.Name
+ ".map")
1217 EfiFileName
= os
.path
.join(self
.OutputDir
, self
.Name
+ ".efi")
1218 VfrUniOffsetList
= GetVariableOffset(MapFileName
, EfiFileName
, list(VfrUniBaseName
.values()))
1219 if not VfrUniOffsetList
:
1222 OutputName
= '%sOffset.bin' % self
.Name
1223 UniVfrOffsetFileName
= os
.path
.join( self
.OutputDir
, OutputName
)
1226 fInputfile
= open(UniVfrOffsetFileName
, "wb+", 0)
1228 EdkLogger
.error("build", FILE_OPEN_FAILURE
, "File open failed for %s" % UniVfrOffsetFileName
, None)
1230 # Use a instance of BytesIO to cache data
1231 fStringIO
= BytesIO()
1233 for Item
in VfrUniOffsetList
:
1234 if (Item
[0].find("Strings") != -1):
1236 # UNI offset in image.
1238 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
1240 UniGuid
= b
'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
1241 fStringIO
.write(UniGuid
)
1242 UniValue
= pack ('Q', int (Item
[1], 16))
1243 fStringIO
.write (UniValue
)
1246 # VFR binary offset in image.
1248 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
1250 VfrGuid
= b
'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
1251 fStringIO
.write(VfrGuid
)
1252 VfrValue
= pack ('Q', int (Item
[1], 16))
1253 fStringIO
.write (VfrValue
)
1255 # write data into file.
1258 fInputfile
.write (fStringIO
.getvalue())
1260 EdkLogger
.error("build", FILE_WRITE_FAILURE
, "Write data to file %s failed, please check whether the "
1261 "file been locked or using by other applications." %UniVfrOffsetFileName
, None)
1268 def OutputFile(self
):
1270 OutputDir
= self
.OutputDir
.replace('\\', '/').strip('/')
1271 DebugDir
= self
.DebugDir
.replace('\\', '/').strip('/')
1272 FfsOutputDir
= self
.FfsOutputDir
.replace('\\', '/').rstrip('/')
1273 for Item
in self
.CodaTargetList
:
1274 File
= Item
.Target
.Path
.replace('\\', '/').strip('/').replace(DebugDir
, '').replace(OutputDir
, '').strip('/')
1276 if self
.DepexGenerated
:
1277 retVal
.add(self
.Name
+ '.depex')
1279 Bin
= self
._GenOffsetBin
()
1283 for Root
, Dirs
, Files
in os
.walk(OutputDir
):
1285 if File
.lower().endswith('.pdb'):
1288 for Root
, Dirs
, Files
in os
.walk(FfsOutputDir
):
1290 if File
.lower().endswith('.ffs') or File
.lower().endswith('.offset') or File
.lower().endswith('.raw') \
1291 or File
.lower().endswith('.raw.txt'):
1296 ## Create AsBuilt INF file the module
1298 def CreateAsBuiltInf(self
):
1300 if self
.IsAsBuiltInfCreated
:
1303 # Skip INF file generation for libraries
1307 # Skip the following code for modules with no source files
1308 if not self
.SourceFileList
:
1311 # Skip the following code for modules without any binary files
1312 if self
.BinaryFileList
:
1315 ### TODO: How to handles mixed source and binary modules
1317 # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
1318 # Also find all packages that the DynamicEx PCDs depend on
1323 PcdTokenSpaceList
= []
1324 for Pcd
in self
.ModulePcdList
+ self
.LibraryPcdList
:
1325 if Pcd
.Type
== TAB_PCDS_PATCHABLE_IN_MODULE
:
1326 PatchablePcds
.append(Pcd
)
1327 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_PATCHABLE_IN_MODULE
))
1328 elif Pcd
.Type
in PCD_DYNAMIC_EX_TYPE_SET
:
1331 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC_EX
))
1332 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC
))
1333 PcdTokenSpaceList
.append(Pcd
.TokenSpaceGuidCName
)
1334 GuidList
= OrderedDict(self
.GuidList
)
1335 for TokenSpace
in self
.GetGuidsUsedByPcd
:
1336 # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
1337 # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
1338 if TokenSpace
not in PcdTokenSpaceList
and TokenSpace
in GuidList
:
1339 GuidList
.pop(TokenSpace
)
1340 CheckList
= (GuidList
, self
.PpiList
, self
.ProtocolList
, PcdCheckList
)
1341 for Package
in self
.DerivedPackageList
:
1342 if Package
in Packages
:
1344 BeChecked
= (Package
.Guids
, Package
.Ppis
, Package
.Protocols
, Package
.Pcds
)
1346 for Index
in range(len(BeChecked
)):
1347 for Item
in CheckList
[Index
]:
1348 if Item
in BeChecked
[Index
]:
1349 Packages
.append(Package
)
1355 VfrPcds
= self
._GetPcdsMaybeUsedByVfr
()
1356 for Pkg
in self
.PlatformInfo
.PackageList
:
1359 for VfrPcd
in VfrPcds
:
1360 if ((VfrPcd
.TokenCName
, VfrPcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC_EX
) in Pkg
.Pcds
or
1361 (VfrPcd
.TokenCName
, VfrPcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC
) in Pkg
.Pcds
):
1362 Packages
.append(Pkg
)
1365 ModuleType
= SUP_MODULE_DXE_DRIVER
if self
.ModuleType
== SUP_MODULE_UEFI_DRIVER
and self
.DepexGenerated
else self
.ModuleType
1366 DriverType
= self
.PcdIsDriver
if self
.PcdIsDriver
else ''
1368 MDefs
= self
.Module
.Defines
1371 'module_name' : self
.Name
,
1372 'module_guid' : Guid
,
1373 'module_module_type' : ModuleType
,
1374 'module_version_string' : [MDefs
['VERSION_STRING']] if 'VERSION_STRING' in MDefs
else [],
1375 'pcd_is_driver_string' : [],
1376 'module_uefi_specification_version' : [],
1377 'module_pi_specification_version' : [],
1378 'module_entry_point' : self
.Module
.ModuleEntryPointList
,
1379 'module_unload_image' : self
.Module
.ModuleUnloadImageList
,
1380 'module_constructor' : self
.Module
.ConstructorList
,
1381 'module_destructor' : self
.Module
.DestructorList
,
1382 'module_shadow' : [MDefs
['SHADOW']] if 'SHADOW' in MDefs
else [],
1383 'module_pci_vendor_id' : [MDefs
['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs
else [],
1384 'module_pci_device_id' : [MDefs
['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs
else [],
1385 'module_pci_class_code' : [MDefs
['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs
else [],
1386 'module_pci_revision' : [MDefs
['PCI_REVISION']] if 'PCI_REVISION' in MDefs
else [],
1387 'module_build_number' : [MDefs
['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs
else [],
1388 'module_spec' : [MDefs
['SPEC']] if 'SPEC' in MDefs
else [],
1389 'module_uefi_hii_resource_section' : [MDefs
['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs
else [],
1390 'module_uni_file' : [MDefs
['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs
else [],
1391 'module_arch' : self
.Arch
,
1392 'package_item' : [Package
.MetaFile
.File
.replace('\\', '/') for Package
in Packages
],
1394 'patchablepcd_item' : [],
1396 'protocol_item' : [],
1400 'libraryclasses_item' : []
1403 if 'MODULE_UNI_FILE' in MDefs
:
1404 UNIFile
= os
.path
.join(self
.MetaFile
.Dir
, MDefs
['MODULE_UNI_FILE'])
1405 if os
.path
.isfile(UNIFile
):
1406 shutil
.copy2(UNIFile
, self
.OutputDir
)
1408 if self
.AutoGenVersion
> int(gInfSpecVersion
, 0):
1409 AsBuiltInfDict
['module_inf_version'] = '0x%08x' % self
.AutoGenVersion
1411 AsBuiltInfDict
['module_inf_version'] = gInfSpecVersion
1414 AsBuiltInfDict
['pcd_is_driver_string'].append(DriverType
)
1416 if 'UEFI_SPECIFICATION_VERSION' in self
.Specification
:
1417 AsBuiltInfDict
['module_uefi_specification_version'].append(self
.Specification
['UEFI_SPECIFICATION_VERSION'])
1418 if 'PI_SPECIFICATION_VERSION' in self
.Specification
:
1419 AsBuiltInfDict
['module_pi_specification_version'].append(self
.Specification
['PI_SPECIFICATION_VERSION'])
1421 OutputDir
= self
.OutputDir
.replace('\\', '/').strip('/')
1422 DebugDir
= self
.DebugDir
.replace('\\', '/').strip('/')
1423 for Item
in self
.CodaTargetList
:
1424 File
= Item
.Target
.Path
.replace('\\', '/').strip('/').replace(DebugDir
, '').replace(OutputDir
, '').strip('/')
1425 if os
.path
.isabs(File
):
1426 File
= File
.replace('\\', '/').strip('/').replace(OutputDir
, '').strip('/')
1427 if Item
.Target
.Ext
.lower() == '.aml':
1428 AsBuiltInfDict
['binary_item'].append('ASL|' + File
)
1429 elif Item
.Target
.Ext
.lower() == '.acpi':
1430 AsBuiltInfDict
['binary_item'].append('ACPI|' + File
)
1431 elif Item
.Target
.Ext
.lower() == '.efi':
1432 AsBuiltInfDict
['binary_item'].append('PE32|' + self
.Name
+ '.efi')
1434 AsBuiltInfDict
['binary_item'].append('BIN|' + File
)
1435 if not self
.DepexGenerated
:
1436 DepexFile
= os
.path
.join(self
.OutputDir
, self
.Name
+ '.depex')
1437 if os
.path
.exists(DepexFile
):
1438 self
.DepexGenerated
= True
1439 if self
.DepexGenerated
:
1440 if self
.ModuleType
in [SUP_MODULE_PEIM
]:
1441 AsBuiltInfDict
['binary_item'].append('PEI_DEPEX|' + self
.Name
+ '.depex')
1442 elif self
.ModuleType
in [SUP_MODULE_DXE_DRIVER
, SUP_MODULE_DXE_RUNTIME_DRIVER
, SUP_MODULE_DXE_SAL_DRIVER
, SUP_MODULE_UEFI_DRIVER
]:
1443 AsBuiltInfDict
['binary_item'].append('DXE_DEPEX|' + self
.Name
+ '.depex')
1444 elif self
.ModuleType
in [SUP_MODULE_DXE_SMM_DRIVER
]:
1445 AsBuiltInfDict
['binary_item'].append('SMM_DEPEX|' + self
.Name
+ '.depex')
1447 Bin
= self
._GenOffsetBin
()
1449 AsBuiltInfDict
['binary_item'].append('BIN|%s' % Bin
)
1451 for Root
, Dirs
, Files
in os
.walk(OutputDir
):
1453 if File
.lower().endswith('.pdb'):
1454 AsBuiltInfDict
['binary_item'].append('DISPOSABLE|' + File
)
1455 HeaderComments
= self
.Module
.HeaderComments
1457 for Index
in range(len(HeaderComments
)):
1458 if HeaderComments
[Index
].find('@BinaryHeader') != -1:
1459 HeaderComments
[Index
] = HeaderComments
[Index
].replace('@BinaryHeader', '@file')
1462 AsBuiltInfDict
['header_comments'] = '\n'.join(HeaderComments
[StartPos
:]).replace(':#', '://')
1463 AsBuiltInfDict
['tail_comments'] = '\n'.join(self
.Module
.TailComments
)
1466 (self
.ProtocolList
, self
._ProtocolComments
, 'protocol_item'),
1467 (self
.PpiList
, self
._PpiComments
, 'ppi_item'),
1468 (GuidList
, self
._GuidComments
, 'guid_item')
1470 for Item
in GenList
:
1471 for CName
in Item
[0]:
1472 Comments
= '\n '.join(Item
[1][CName
]) if CName
in Item
[1] else ''
1473 Entry
= Comments
+ '\n ' + CName
if Comments
else CName
1474 AsBuiltInfDict
[Item
[2]].append(Entry
)
1475 PatchList
= parsePcdInfoFromMapFile(
1476 os
.path
.join(self
.OutputDir
, self
.Name
+ '.map'),
1477 os
.path
.join(self
.OutputDir
, self
.Name
+ '.efi')
1480 for Pcd
in PatchablePcds
:
1481 TokenCName
= Pcd
.TokenCName
1482 for PcdItem
in GlobalData
.MixedPcd
:
1483 if (Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
) in GlobalData
.MixedPcd
[PcdItem
]:
1484 TokenCName
= PcdItem
[0]
1486 for PatchPcd
in PatchList
:
1487 if TokenCName
== PatchPcd
[0]:
1492 if Pcd
.DatumType
== 'BOOLEAN':
1493 BoolValue
= Pcd
.DefaultValue
.upper()
1494 if BoolValue
== 'TRUE':
1495 Pcd
.DefaultValue
= '1'
1496 elif BoolValue
== 'FALSE':
1497 Pcd
.DefaultValue
= '0'
1499 if Pcd
.DatumType
in TAB_PCD_NUMERIC_TYPES
:
1500 HexFormat
= '0x%02x'
1501 if Pcd
.DatumType
== TAB_UINT16
:
1502 HexFormat
= '0x%04x'
1503 elif Pcd
.DatumType
== TAB_UINT32
:
1504 HexFormat
= '0x%08x'
1505 elif Pcd
.DatumType
== TAB_UINT64
:
1506 HexFormat
= '0x%016x'
1507 PcdValue
= HexFormat
% int(Pcd
.DefaultValue
, 0)
1509 if Pcd
.MaxDatumSize
is None or Pcd
.MaxDatumSize
== '':
1510 EdkLogger
.error("build", AUTOGEN_ERROR
,
1511 "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1513 ArraySize
= int(Pcd
.MaxDatumSize
, 0)
1514 PcdValue
= Pcd
.DefaultValue
1515 if PcdValue
[0] != '{':
1517 if PcdValue
[0] == 'L':
1519 PcdValue
= PcdValue
.lstrip('L')
1520 PcdValue
= eval(PcdValue
)
1522 for Index
in range(0, len(PcdValue
)):
1524 CharVal
= ord(PcdValue
[Index
])
1525 NewValue
= NewValue
+ '0x%02x' % (CharVal
& 0x00FF) + ', ' \
1526 + '0x%02x' % (CharVal
>> 8) + ', '
1528 NewValue
= NewValue
+ '0x%02x' % (ord(PcdValue
[Index
]) % 0x100) + ', '
1531 Padding
= Padding
* 2
1532 ArraySize
= ArraySize
// 2
1533 if ArraySize
< (len(PcdValue
) + 1):
1534 if Pcd
.MaxSizeUserSet
:
1535 EdkLogger
.error("build", AUTOGEN_ERROR
,
1536 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1539 ArraySize
= len(PcdValue
) + 1
1540 if ArraySize
> len(PcdValue
) + 1:
1541 NewValue
= NewValue
+ Padding
* (ArraySize
- len(PcdValue
) - 1)
1542 PcdValue
= NewValue
+ Padding
.strip().rstrip(',') + '}'
1543 elif len(PcdValue
.split(',')) <= ArraySize
:
1544 PcdValue
= PcdValue
.rstrip('}') + ', 0x00' * (ArraySize
- len(PcdValue
.split(',')))
1547 if Pcd
.MaxSizeUserSet
:
1548 EdkLogger
.error("build", AUTOGEN_ERROR
,
1549 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1552 ArraySize
= len(PcdValue
) + 1
1553 PcdItem
= '%s.%s|%s|0x%X' % \
1554 (Pcd
.TokenSpaceGuidCName
, TokenCName
, PcdValue
, PatchPcd
[1])
1556 if (Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
) in self
._PcdComments
:
1557 PcdComments
= '\n '.join(self
._PcdComments
[Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
])
1559 PcdItem
= PcdComments
+ '\n ' + PcdItem
1560 AsBuiltInfDict
['patchablepcd_item'].append(PcdItem
)
1562 for Pcd
in Pcds
+ VfrPcds
:
1565 TokenCName
= Pcd
.TokenCName
1566 for PcdItem
in GlobalData
.MixedPcd
:
1567 if (Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
) in GlobalData
.MixedPcd
[PcdItem
]:
1568 TokenCName
= PcdItem
[0]
1570 if Pcd
.Type
== TAB_PCDS_DYNAMIC_EX_HII
:
1571 for SkuName
in Pcd
.SkuInfoList
:
1572 SkuInfo
= Pcd
.SkuInfoList
[SkuName
]
1573 HiiInfo
= '## %s|%s|%s' % (SkuInfo
.VariableName
, SkuInfo
.VariableGuid
, SkuInfo
.VariableOffset
)
1575 if (Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
) in self
._PcdComments
:
1576 PcdCommentList
= self
._PcdComments
[Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
][:]
1580 for Index
, Comment
in enumerate(PcdCommentList
):
1581 for Usage
in UsageList
:
1582 if Comment
.find(Usage
) != -1:
1586 if UsageIndex
!= -1:
1587 PcdCommentList
[UsageIndex
] = '## %s %s %s' % (UsageStr
, HiiInfo
, PcdCommentList
[UsageIndex
].replace(UsageStr
, ''))
1589 PcdCommentList
.append('## UNDEFINED ' + HiiInfo
)
1590 PcdComments
= '\n '.join(PcdCommentList
)
1591 PcdEntry
= Pcd
.TokenSpaceGuidCName
+ '.' + TokenCName
1593 PcdEntry
= PcdComments
+ '\n ' + PcdEntry
1594 AsBuiltInfDict
['pcd_item'].append(PcdEntry
)
1595 for Item
in self
.BuildOption
:
1596 if 'FLAGS' in self
.BuildOption
[Item
]:
1597 AsBuiltInfDict
['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self
.ToolChainFamily
, self
.BuildTarget
, self
.ToolChain
, self
.Arch
, Item
, self
.BuildOption
[Item
]['FLAGS'].strip()))
1599 # Generated LibraryClasses section in comments.
1600 for Library
in self
.LibraryAutoGenList
:
1601 AsBuiltInfDict
['libraryclasses_item'].append(Library
.MetaFile
.File
.replace('\\', '/'))
1603 # Generated UserExtensions TianoCore section.
1604 # All tianocore user extensions are copied.
1606 for TianoCore
in self
._GetTianoCoreUserExtensionList
():
1607 UserExtStr
+= '\n'.join(TianoCore
)
1608 ExtensionFile
= os
.path
.join(self
.MetaFile
.Dir
, TianoCore
[1])
1609 if os
.path
.isfile(ExtensionFile
):
1610 shutil
.copy2(ExtensionFile
, self
.OutputDir
)
1611 AsBuiltInfDict
['userextension_tianocore_item'] = UserExtStr
1613 # Generated depex expression section in comments.
1614 DepexExpression
= self
._GetDepexExpresionString
()
1615 AsBuiltInfDict
['depexsection_item'] = DepexExpression
if DepexExpression
else ''
1617 AsBuiltInf
= TemplateString()
1618 AsBuiltInf
.Append(gAsBuiltInfHeaderString
.Replace(AsBuiltInfDict
))
1620 SaveFileOnChange(os
.path
.join(self
.OutputDir
, self
.Name
+ '.inf'), str(AsBuiltInf
), False)
1622 self
.IsAsBuiltInfCreated
= True
def CacheCopyFile(self, OriginDir, CopyDir, File):
    """Copy File (taken relative to CopyDir) into the matching sub-directory
    under OriginDir, creating the destination directory first.

    Copy failures are logged as a cache warning and otherwise ignored: a
    broken binary-cache copy must not fail the build.
    """
    sub_dir = os.path.relpath(File, CopyDir)
    destination_file = os.path.join(OriginDir, sub_dir)
    destination_dir = os.path.dirname(destination_file)
    CreateDirectory(destination_dir)
    try:
        CopyFileOnChange(File, destination_dir)
    except Exception:
        # Narrowed from a swallow-all handler so KeyboardInterrupt/SystemExit
        # still propagate; best-effort caching continues on any real error.
        EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))
1635 def CopyModuleToCache(self
):
1636 self
.GenPreMakefileHash(GlobalData
.gCacheIR
)
1637 if not (self
.MetaFile
.Path
, self
.Arch
) in GlobalData
.gCacheIR
or \
1638 not GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
:
1639 EdkLogger
.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1642 self
.GenMakeHash(GlobalData
.gCacheIR
)
1643 if not (self
.MetaFile
.Path
, self
.Arch
) in GlobalData
.gCacheIR
or \
1644 not GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
or \
1645 not GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
:
1646 EdkLogger
.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1649 MakeHashStr
= str(GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
)
1650 FileDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
, MakeHashStr
)
1651 FfsDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
, MakeHashStr
)
1653 CreateDirectory (FileDir
)
1654 self
.SaveHashChainFileToCache(GlobalData
.gCacheIR
)
1655 ModuleFile
= path
.join(self
.OutputDir
, self
.Name
+ '.inf')
1656 if os
.path
.exists(ModuleFile
):
1657 CopyFileOnChange(ModuleFile
, FileDir
)
1658 if not self
.OutputFile
:
1659 Ma
= self
.BuildDatabase
[self
.MetaFile
, self
.Arch
, self
.BuildTarget
, self
.ToolChain
]
1660 self
.OutputFile
= Ma
.Binaries
1661 for File
in self
.OutputFile
:
1663 if not os
.path
.isabs(File
):
1664 NewFile
= os
.path
.join(self
.OutputDir
, File
)
1665 if not os
.path
.exists(NewFile
):
1666 NewFile
= os
.path
.join(self
.FfsOutputDir
, File
)
1668 if os
.path
.exists(File
):
1669 if File
.lower().endswith('.ffs') or File
.lower().endswith('.offset') or File
.lower().endswith('.raw') \
1670 or File
.lower().endswith('.raw.txt'):
1671 self
.CacheCopyFile(FfsDir
, self
.FfsOutputDir
, File
)
1673 self
.CacheCopyFile(FileDir
, self
.OutputDir
, File
)
1675 def SaveHashChainFileToCache(self
, gDict
):
1676 if not GlobalData
.gBinCacheDest
:
1679 self
.GenPreMakefileHash(gDict
)
1680 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
1681 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
:
1682 EdkLogger
.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1685 self
.GenMakeHash(gDict
)
1686 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
1687 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
or \
1688 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
:
1689 EdkLogger
.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1692 # save the hash chain list as cache file
1693 MakeHashStr
= str(GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
)
1694 CacheDestDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
)
1695 CacheHashDestDir
= path
.join(CacheDestDir
, MakeHashStr
)
1696 ModuleHashPair
= path
.join(CacheDestDir
, self
.Name
+ ".ModuleHashPair")
1697 MakeHashChain
= path
.join(CacheHashDestDir
, self
.Name
+ ".MakeHashChain")
1698 ModuleFilesChain
= path
.join(CacheHashDestDir
, self
.Name
+ ".ModuleFilesChain")
1700 # save the HashChainDict as json file
1701 CreateDirectory (CacheDestDir
)
1702 CreateDirectory (CacheHashDestDir
)
1704 ModuleHashPairList
= [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
1705 if os
.path
.exists(ModuleHashPair
):
1706 with
open(ModuleHashPair
, 'r') as f
:
1707 ModuleHashPairList
= json
.load(f
)
1708 PreMakeHash
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
1709 MakeHash
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
1710 ModuleHashPairList
.append((PreMakeHash
, MakeHash
))
1711 ModuleHashPairList
= list(set(map(tuple, ModuleHashPairList
)))
1712 with
open(ModuleHashPair
, 'w') as f
:
1713 json
.dump(ModuleHashPairList
, f
, indent
=2)
1715 EdkLogger
.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair
)
1719 with
open(MakeHashChain
, 'w') as f
:
1720 json
.dump(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
, f
, indent
=2)
1722 EdkLogger
.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain
)
1726 with
open(ModuleFilesChain
, 'w') as f
:
1727 json
.dump(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesChain
, f
, indent
=2)
1729 EdkLogger
.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain
)
1732 # save the autogenfile and makefile for debug usage
1733 CacheDebugDir
= path
.join(CacheHashDestDir
, "CacheDebug")
1734 CreateDirectory (CacheDebugDir
)
1735 CopyFileOnChange(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakefilePath
, CacheDebugDir
)
1736 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].AutoGenFileList
:
1737 for File
in gDict
[(self
.MetaFile
.Path
, self
.Arch
)].AutoGenFileList
:
1738 CopyFileOnChange(str(File
), CacheDebugDir
)
1742 ## Create makefile for the module and its dependent libraries
1744 # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of
1745 # dependent libraries will be created
1747 @cached_class_function
1748 def CreateMakeFile(self
, CreateLibraryMakeFile
=True, GenFfsList
= []):
1749 gDict
= GlobalData
.gCacheIR
1750 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
1751 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateMakeFileDone
:
1754 # nest this function inside it's only caller.
1755 def CreateTimeStamp():
1756 FileSet
= {self
.MetaFile
.Path
}
1758 for SourceFile
in self
.Module
.Sources
:
1759 FileSet
.add (SourceFile
.Path
)
1761 for Lib
in self
.DependentLibraryList
:
1762 FileSet
.add (Lib
.MetaFile
.Path
)
1764 for f
in self
.AutoGenDepSet
:
1765 FileSet
.add (f
.Path
)
1767 if os
.path
.exists (self
.TimeStampPath
):
1768 os
.remove (self
.TimeStampPath
)
1770 SaveFileOnChange(self
.TimeStampPath
, "\n".join(FileSet
), False)
1772 # Ignore generating makefile when it is a binary module
1773 if self
.IsBinaryModule
:
1776 self
.GenFfsList
= GenFfsList
1778 if not self
.IsLibrary
and CreateLibraryMakeFile
:
1779 for LibraryAutoGen
in self
.LibraryAutoGenList
:
1780 LibraryAutoGen
.CreateMakeFile()
1782 # CanSkip uses timestamps to determine build skipping
1786 if len(self
.CustomMakefile
) == 0:
1787 Makefile
= GenMake
.ModuleMakefile(self
)
1789 Makefile
= GenMake
.CustomMakefile(self
)
1790 if Makefile
.Generate():
1791 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated makefile for module %s [%s]" %
1792 (self
.Name
, self
.Arch
))
1794 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Skipped the generation of makefile for module %s [%s]" %
1795 (self
.Name
, self
.Arch
))
1799 MakefileType
= Makefile
._FileType
1800 MakefileName
= Makefile
._FILE
_NAME
_[MakefileType
]
1801 MakefilePath
= os
.path
.join(self
.MakeFileDir
, MakefileName
)
1803 MewIR
= ModuleBuildCacheIR(self
.MetaFile
.Path
, self
.Arch
)
1804 MewIR
.MakefilePath
= MakefilePath
1805 MewIR
.DependencyHeaderFileSet
= Makefile
.DependencyHeaderFileSet
1806 MewIR
.CreateMakeFileDone
= True
1807 with GlobalData
.cache_lock
:
1809 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
1810 IR
.MakefilePath
= MakefilePath
1811 IR
.DependencyHeaderFileSet
= Makefile
.DependencyHeaderFileSet
1812 IR
.CreateMakeFileDone
= True
1813 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
1815 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = MewIR
def CopyBinaryFiles(self):
    """Copy every pre-built binary of this binary module into its output
    directory, preserving each file's base name."""
    for Binary in self.Module.Binaries:
        SrcPath = Binary.Path
        DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))
        CopyLongFilePath(SrcPath, DstPath)
1822 ## Create autogen code for the module and its dependent libraries
1824 # @param CreateLibraryCodeFile Flag indicating if or not the code of
1825 # dependent libraries will be created
1827 def CreateCodeFile(self
, CreateLibraryCodeFile
=True):
1828 gDict
= GlobalData
.gCacheIR
1829 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
1830 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateCodeFileDone
:
1833 if self
.IsCodeFileCreated
:
1836 # Need to generate PcdDatabase even PcdDriver is binarymodule
1837 if self
.IsBinaryModule
and self
.PcdIsDriver
!= '':
1838 CreatePcdDatabaseCode(self
, TemplateString(), TemplateString())
1840 if self
.IsBinaryModule
:
1842 self
.CopyBinaryFiles()
1845 if not self
.IsLibrary
and CreateLibraryCodeFile
:
1846 for LibraryAutoGen
in self
.LibraryAutoGenList
:
1847 LibraryAutoGen
.CreateCodeFile()
1849 # CanSkip uses timestamps to determine build skipping
1854 IgoredAutoGenList
= []
1856 for File
in self
.AutoGenFileList
:
1857 if GenC
.Generate(File
.Path
, self
.AutoGenFileList
[File
], File
.IsBinary
):
1858 AutoGenList
.append(str(File
))
1860 IgoredAutoGenList
.append(str(File
))
1863 for ModuleType
in self
.DepexList
:
1864 # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
1865 if len(self
.DepexList
[ModuleType
]) == 0 or ModuleType
== SUP_MODULE_USER_DEFINED
or ModuleType
== SUP_MODULE_HOST_APPLICATION
:
1868 Dpx
= GenDepex
.DependencyExpression(self
.DepexList
[ModuleType
], ModuleType
, True)
1869 DpxFile
= gAutoGenDepexFileName
% {"module_name" : self
.Name
}
1871 if len(Dpx
.PostfixNotation
) != 0:
1872 self
.DepexGenerated
= True
1874 if Dpx
.Generate(path
.join(self
.OutputDir
, DpxFile
)):
1875 AutoGenList
.append(str(DpxFile
))
1877 IgoredAutoGenList
.append(str(DpxFile
))
1879 if IgoredAutoGenList
== []:
1880 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated [%s] files for module %s [%s]" %
1881 (" ".join(AutoGenList
), self
.Name
, self
.Arch
))
1882 elif AutoGenList
== []:
1883 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Skipped the generation of [%s] files for module %s [%s]" %
1884 (" ".join(IgoredAutoGenList
), self
.Name
, self
.Arch
))
1886 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated [%s] (skipped %s) files for module %s [%s]" %
1887 (" ".join(AutoGenList
), " ".join(IgoredAutoGenList
), self
.Name
, self
.Arch
))
1889 self
.IsCodeFileCreated
= True
1890 MewIR
= ModuleBuildCacheIR(self
.MetaFile
.Path
, self
.Arch
)
1891 MewIR
.CreateCodeFileDone
= True
1892 with GlobalData
.cache_lock
:
1894 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
1895 IR
.CreateCodeFileDone
= True
1896 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
1898 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = MewIR
1902 ## Summarize the ModuleAutoGen objects of all libraries used by this module
1904 def LibraryAutoGenList(self
):
1906 for Library
in self
.DependentLibraryList
:
1913 self
.PlatformInfo
.MetaFile
,
1917 if La
not in RetVal
:
1919 for Lib
in La
.CodaTargetList
:
1920 self
._ApplyBuildRule
(Lib
.Target
, TAB_UNKNOWN_FILE
)
1923 def GenModuleHash(self
):
1924 # Initialize a dictionary for each arch type
1925 if self
.Arch
not in GlobalData
.gModuleHash
:
1926 GlobalData
.gModuleHash
[self
.Arch
] = {}
1928 # Early exit if module or library has been hashed and is in memory
1929 if self
.Name
in GlobalData
.gModuleHash
[self
.Arch
]:
1930 return GlobalData
.gModuleHash
[self
.Arch
][self
.Name
].encode('utf-8')
1932 # Initialze hash object
1935 # Add Platform level hash
1936 m
.update(GlobalData
.gPlatformHash
.encode('utf-8'))
1938 # Add Package level hash
1939 if self
.DependentPackageList
:
1940 for Pkg
in sorted(self
.DependentPackageList
, key
=lambda x
: x
.PackageName
):
1941 if Pkg
.PackageName
in GlobalData
.gPackageHash
:
1942 m
.update(GlobalData
.gPackageHash
[Pkg
.PackageName
].encode('utf-8'))
1945 if self
.LibraryAutoGenList
:
1946 for Lib
in sorted(self
.LibraryAutoGenList
, key
=lambda x
: x
.Name
):
1947 if Lib
.Name
not in GlobalData
.gModuleHash
[self
.Arch
]:
1949 m
.update(GlobalData
.gModuleHash
[self
.Arch
][Lib
.Name
].encode('utf-8'))
1952 with
open(str(self
.MetaFile
), 'rb') as f
:
1956 # Add Module's source files
1957 if self
.SourceFileList
:
1958 for File
in sorted(self
.SourceFileList
, key
=lambda x
: str(x
)):
1959 f
= open(str(File
), 'rb')
1964 GlobalData
.gModuleHash
[self
.Arch
][self
.Name
] = m
.hexdigest()
1966 return GlobalData
.gModuleHash
[self
.Arch
][self
.Name
].encode('utf-8')
1968 def GenModuleFilesHash(self
, gDict
):
1969 # Early exit if module or library has been hashed and is in memory
1970 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
:
1971 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesChain
:
1972 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
1974 # skip if the module cache already crashed
1975 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
1976 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
1979 DependencyFileSet
= set()
1980 # Add Module Meta file
1981 DependencyFileSet
.add(self
.MetaFile
)
1983 # Add Module's source files
1984 if self
.SourceFileList
:
1985 for File
in set(self
.SourceFileList
):
1986 DependencyFileSet
.add(File
)
1988 # Add modules's include header files
1989 # Search dependency file list for each source file
1992 for Target
in self
.IntroTargetList
:
1993 SourceFileList
.extend(Target
.Inputs
)
1994 OutPutFileList
.extend(Target
.Outputs
)
1996 for Item
in OutPutFileList
:
1997 if Item
in SourceFileList
:
1998 SourceFileList
.remove(Item
)
2000 for file_path
in self
.IncludePathList
+ self
.BuildOptionIncPathList
:
2001 # skip the folders in platform BuildDir which are not been generated yet
2002 if file_path
.startswith(os
.path
.abspath(self
.PlatformInfo
.BuildDir
)+os
.sep
):
2004 SearchList
.append(file_path
)
2005 FileDependencyDict
= {}
2006 ForceIncludedFile
= []
2007 for F
in SourceFileList
:
2008 # skip the files which are not been generated yet, because
2009 # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c
2010 if not os
.path
.exists(F
.Path
):
2012 FileDependencyDict
[F
] = GenMake
.GetDependencyList(self
, self
.FileDependCache
, F
, ForceIncludedFile
, SearchList
)
2014 if FileDependencyDict
:
2015 for Dependency
in FileDependencyDict
.values():
2016 DependencyFileSet
.update(set(Dependency
))
2018 # Caculate all above dependency files hash
2019 # Initialze hash object
2022 for File
in sorted(DependencyFileSet
, key
=lambda x
: str(x
)):
2023 if not os
.path
.exists(str(File
)):
2024 EdkLogger
.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File
, self
.MetaFile
.Path
, self
.Arch
))
2026 with
open(str(File
), 'rb') as f
:
2029 FileList
.append((str(File
), hashlib
.md5(Content
).hexdigest()))
2032 MewIR
= ModuleBuildCacheIR(self
.MetaFile
.Path
, self
.Arch
)
2033 MewIR
.ModuleFilesHashDigest
= m
.digest()
2034 MewIR
.ModuleFilesHashHexDigest
= m
.hexdigest()
2035 MewIR
.ModuleFilesChain
= FileList
2036 with GlobalData
.cache_lock
:
2038 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2039 IR
.ModuleFilesHashDigest
= m
.digest()
2040 IR
.ModuleFilesHashHexDigest
= m
.hexdigest()
2041 IR
.ModuleFilesChain
= FileList
2042 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2044 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = MewIR
2046 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2048 def GenPreMakefileHash(self
, gDict
):
2049 # Early exit if module or library has been hashed and is in memory
2050 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
2051 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
:
2052 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2054 # skip if the module cache already crashed
2055 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
2056 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2059 # skip binary module
2060 if self
.IsBinaryModule
:
2063 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2064 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesHashDigest
:
2065 self
.GenModuleFilesHash(gDict
)
2067 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2068 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesHashDigest
:
2069 EdkLogger
.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2072 # Initialze hash object
2075 # Add Platform level hash
2076 if ('PlatformHash') in gDict
:
2077 m
.update(gDict
[('PlatformHash')].encode('utf-8'))
2079 EdkLogger
.quiet("[cache warning]: PlatformHash is missing")
2081 # Add Package level hash
2082 if self
.DependentPackageList
:
2083 for Pkg
in sorted(self
.DependentPackageList
, key
=lambda x
: x
.PackageName
):
2084 if (Pkg
.PackageName
, 'PackageHash') in gDict
:
2085 m
.update(gDict
[(Pkg
.PackageName
, 'PackageHash')].encode('utf-8'))
2087 EdkLogger
.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg
.PackageName
, self
.MetaFile
.Name
, self
.Arch
))
2090 if self
.LibraryAutoGenList
:
2091 for Lib
in sorted(self
.LibraryAutoGenList
, key
=lambda x
: x
.Name
):
2092 if not (Lib
.MetaFile
.Path
, Lib
.Arch
) in gDict
or \
2093 not gDict
[(Lib
.MetaFile
.Path
, Lib
.Arch
)].ModuleFilesHashDigest
:
2094 Lib
.GenPreMakefileHash(gDict
)
2095 m
.update(gDict
[(Lib
.MetaFile
.Path
, Lib
.Arch
)].ModuleFilesHashDigest
)
2098 m
.update(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesHashDigest
)
2100 with GlobalData
.cache_lock
:
2101 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2102 IR
.PreMakefileHashHexDigest
= m
.hexdigest()
2103 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2105 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2107 def GenMakeHeaderFilesHash(self
, gDict
):
2108 # Early exit if module or library has been hashed and is in memory
2109 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
2110 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHeaderFilesHashDigest
:
2111 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2113 # skip if the module cache already crashed
2114 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
2115 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2118 # skip binary module
2119 if self
.IsBinaryModule
:
2122 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2123 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateCodeFileDone
:
2125 if (self
.MetaFile
.File
,self
.MetaFile
.Root
,self
.Arch
,self
.MetaFile
.Path
) in GlobalData
.libConstPcd
:
2126 self
.ConstPcd
= GlobalData
.libConstPcd
[(self
.MetaFile
.File
,self
.MetaFile
.Root
,self
.Arch
,self
.MetaFile
.Path
)]
2127 if (self
.MetaFile
.File
,self
.MetaFile
.Root
,self
.Arch
,self
.MetaFile
.Path
) in GlobalData
.Refes
:
2128 self
.ReferenceModules
= GlobalData
.Refes
[(self
.MetaFile
.File
,self
.MetaFile
.Root
,self
.Arch
,self
.MetaFile
.Path
)]
2129 self
.CreateCodeFile()
2130 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2131 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateMakeFileDone
:
2132 self
.CreateMakeFile(GenFfsList
=GlobalData
.FfsCmd
.get((self
.MetaFile
.File
, self
.Arch
),[]))
2134 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2135 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateCodeFileDone
or \
2136 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateMakeFileDone
:
2137 EdkLogger
.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2140 DependencyFileSet
= set()
2142 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakefilePath
:
2143 DependencyFileSet
.add(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakefilePath
)
2145 EdkLogger
.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2148 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].DependencyHeaderFileSet
:
2149 for File
in gDict
[(self
.MetaFile
.Path
, self
.Arch
)].DependencyHeaderFileSet
:
2150 DependencyFileSet
.add(File
)
2152 EdkLogger
.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2155 if self
.AutoGenFileList
:
2156 for File
in set(self
.AutoGenFileList
):
2157 DependencyFileSet
.add(File
)
2159 # Caculate all above dependency files hash
2160 # Initialze hash object
2163 for File
in sorted(DependencyFileSet
, key
=lambda x
: str(x
)):
2164 if not os
.path
.exists(str(File
)):
2165 EdkLogger
.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File
, self
.MetaFile
.Path
, self
.Arch
))
2167 f
= open(str(File
), 'rb')
2171 FileList
.append((str(File
), hashlib
.md5(Content
).hexdigest()))
2173 with GlobalData
.cache_lock
:
2174 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2175 IR
.AutoGenFileList
= self
.AutoGenFileList
.keys()
2176 IR
.MakeHeaderFilesHashChain
= FileList
2177 IR
.MakeHeaderFilesHashDigest
= m
.digest()
2178 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2180 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2182 def GenMakeHash(self
, gDict
):
2183 # Early exit if module or library has been hashed and is in memory
2184 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
2185 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
:
2186 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2188 # skip if the module cache already crashed
2189 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
2190 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2193 # skip binary module
2194 if self
.IsBinaryModule
:
2197 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2198 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesHashDigest
:
2199 self
.GenModuleFilesHash(gDict
)
2200 if not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHeaderFilesHashDigest
:
2201 self
.GenMakeHeaderFilesHash(gDict
)
2203 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2204 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesHashDigest
or \
2205 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesChain
or \
2206 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHeaderFilesHashDigest
or \
2207 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHeaderFilesHashChain
:
2208 EdkLogger
.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2211 # Initialze hash object
2215 # Add hash of makefile and dependency header files
2216 m
.update(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHeaderFilesHashDigest
)
2217 New
= list(set(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHeaderFilesHashChain
) - set(MakeHashChain
))
2218 New
.sort(key
=lambda x
: str(x
))
2219 MakeHashChain
+= New
2222 if self
.LibraryAutoGenList
:
2223 for Lib
in sorted(self
.LibraryAutoGenList
, key
=lambda x
: x
.Name
):
2224 if not (Lib
.MetaFile
.Path
, Lib
.Arch
) in gDict
or \
2225 not gDict
[(Lib
.MetaFile
.Path
, Lib
.Arch
)].MakeHashChain
:
2226 Lib
.GenMakeHash(gDict
)
2227 if not gDict
[(Lib
.MetaFile
.Path
, Lib
.Arch
)].MakeHashDigest
:
2228 print("Cannot generate MakeHash for lib module:", Lib
.MetaFile
.Path
, Lib
.Arch
)
2230 m
.update(gDict
[(Lib
.MetaFile
.Path
, Lib
.Arch
)].MakeHashDigest
)
2231 New
= list(set(gDict
[(Lib
.MetaFile
.Path
, Lib
.Arch
)].MakeHashChain
) - set(MakeHashChain
))
2232 New
.sort(key
=lambda x
: str(x
))
2233 MakeHashChain
+= New
2236 m
.update(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesHashDigest
)
2237 New
= list(set(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesChain
) - set(MakeHashChain
))
2238 New
.sort(key
=lambda x
: str(x
))
2239 MakeHashChain
+= New
2241 with GlobalData
.cache_lock
:
2242 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2243 IR
.MakeHashDigest
= m
.digest()
2244 IR
.MakeHashHexDigest
= m
.hexdigest()
2245 IR
.MakeHashChain
= MakeHashChain
2246 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2248 return gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2250 ## Decide whether we can skip the left autogen and make process
2251 def CanSkipbyPreMakefileCache(self
, gDict
):
2252 if not GlobalData
.gBinCacheSource
:
2255 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakeCacheHit
:
2258 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2261 # If Module is binary, do not skip by cache
2262 if self
.IsBinaryModule
:
2265 # .inc is contains binary information so do not skip by hash as well
2266 for f_ext
in self
.SourceFileList
:
2267 if '.inc' in str(f_ext
):
2270 # Get the module hash values from stored cache and currrent build
2271 # then check whether cache hit based on the hash values
2272 # if cache hit, restore all the files from cache
2273 FileDir
= path
.join(GlobalData
.gBinCacheSource
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
)
2274 FfsDir
= path
.join(GlobalData
.gBinCacheSource
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
)
2276 ModuleHashPairList
= [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2277 ModuleHashPair
= path
.join(FileDir
, self
.Name
+ ".ModuleHashPair")
2278 if not os
.path
.exists(ModuleHashPair
):
2279 EdkLogger
.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair
)
2280 with GlobalData
.cache_lock
:
2281 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2282 IR
.CacheCrash
= True
2283 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2287 with
open(ModuleHashPair
, 'r') as f
:
2288 ModuleHashPairList
= json
.load(f
)
2290 EdkLogger
.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair
)
2293 self
.GenPreMakefileHash(gDict
)
2294 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2295 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
:
2296 EdkLogger
.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2300 CurrentPreMakeHash
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
2301 for idx
, (PreMakefileHash
, MakeHash
) in enumerate (ModuleHashPairList
):
2302 if PreMakefileHash
== CurrentPreMakeHash
:
2303 MakeHashStr
= str(MakeHash
)
2308 TargetHashDir
= path
.join(FileDir
, MakeHashStr
)
2309 TargetFfsHashDir
= path
.join(FfsDir
, MakeHashStr
)
2311 if not os
.path
.exists(TargetHashDir
):
2312 EdkLogger
.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir
)
2315 for root
, dir, files
in os
.walk(TargetHashDir
):
2317 File
= path
.join(root
, f
)
2318 self
.CacheCopyFile(self
.OutputDir
, TargetHashDir
, File
)
2319 if os
.path
.exists(TargetFfsHashDir
):
2320 for root
, dir, files
in os
.walk(TargetFfsHashDir
):
2322 File
= path
.join(root
, f
)
2323 self
.CacheCopyFile(self
.FfsOutputDir
, TargetFfsHashDir
, File
)
2325 if self
.Name
== "PcdPeim" or self
.Name
== "PcdDxe":
2326 CreatePcdDatabaseCode(self
, TemplateString(), TemplateString())
2328 with GlobalData
.cache_lock
:
2329 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2330 IR
.PreMakeCacheHit
= True
2331 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2332 print("[cache hit]: checkpoint_PreMakefile:", self
.MetaFile
.Path
, self
.Arch
)
2333 #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))
2336 ## Decide whether we can skip the make process
2337 def CanSkipbyMakeCache(self
, gDict
):
2338 if not GlobalData
.gBinCacheSource
:
2341 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeCacheHit
:
2344 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2347 # If Module is binary, do not skip by cache
2348 if self
.IsBinaryModule
:
2349 print("[cache miss]: checkpoint_Makefile: binary module:", self
.MetaFile
.Path
, self
.Arch
)
2352 # .inc is contains binary information so do not skip by hash as well
2353 for f_ext
in self
.SourceFileList
:
2354 if '.inc' in str(f_ext
):
2355 with GlobalData
.cache_lock
:
2356 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2357 IR
.MakeCacheHit
= False
2358 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2359 print("[cache miss]: checkpoint_Makefile: .inc module:", self
.MetaFile
.Path
, self
.Arch
)
2362 # Get the module hash values from stored cache and currrent build
2363 # then check whether cache hit based on the hash values
2364 # if cache hit, restore all the files from cache
2365 FileDir
= path
.join(GlobalData
.gBinCacheSource
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
)
2366 FfsDir
= path
.join(GlobalData
.gBinCacheSource
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
)
2368 ModuleHashPairList
= [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2369 ModuleHashPair
= path
.join(FileDir
, self
.Name
+ ".ModuleHashPair")
2370 if not os
.path
.exists(ModuleHashPair
):
2371 EdkLogger
.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair
)
2372 with GlobalData
.cache_lock
:
2373 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2374 IR
.CacheCrash
= True
2375 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2379 with
open(ModuleHashPair
, 'r') as f
:
2380 ModuleHashPairList
= json
.load(f
)
2382 EdkLogger
.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair
)
2385 self
.GenMakeHash(gDict
)
2386 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
2387 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
:
2388 EdkLogger
.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self
.MetaFile
.Path
, self
.Arch
))
2392 CurrentMakeHash
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
2393 for idx
, (PreMakefileHash
, MakeHash
) in enumerate (ModuleHashPairList
):
2394 if MakeHash
== CurrentMakeHash
:
2395 MakeHashStr
= str(MakeHash
)
2398 print("[cache miss]: checkpoint_Makefile:", self
.MetaFile
.Path
, self
.Arch
)
2401 TargetHashDir
= path
.join(FileDir
, MakeHashStr
)
2402 TargetFfsHashDir
= path
.join(FfsDir
, MakeHashStr
)
2403 if not os
.path
.exists(TargetHashDir
):
2404 EdkLogger
.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir
)
2407 for root
, dir, files
in os
.walk(TargetHashDir
):
2409 File
= path
.join(root
, f
)
2410 self
.CacheCopyFile(self
.OutputDir
, TargetHashDir
, File
)
2412 if os
.path
.exists(TargetFfsHashDir
):
2413 for root
, dir, files
in os
.walk(TargetFfsHashDir
):
2415 File
= path
.join(root
, f
)
2416 self
.CacheCopyFile(self
.FfsOutputDir
, TargetFfsHashDir
, File
)
2418 if self
.Name
== "PcdPeim" or self
.Name
== "PcdDxe":
2419 CreatePcdDatabaseCode(self
, TemplateString(), TemplateString())
2420 with GlobalData
.cache_lock
:
2421 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
2422 IR
.MakeCacheHit
= True
2423 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
2424 print("[cache hit]: checkpoint_Makefile:", self
.MetaFile
.Path
, self
.Arch
)
2427 ## Show the first file name which causes cache miss
2428 def PrintFirstMakeCacheMissFile(self
, gDict
):
2429 if not GlobalData
.gBinCacheSource
:
2432 # skip if the module cache already crashed
2433 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CacheCrash
:
2436 # skip binary module
2437 if self
.IsBinaryModule
:
2440 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
:
2443 # Only print cache miss file for the MakeCache not hit module
2444 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeCacheHit
:
2447 if not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
:
2448 EdkLogger
.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
2451 # Find the cache dir name through the .ModuleHashPair file info
2452 FileDir
= path
.join(GlobalData
.gBinCacheSource
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
)
2454 ModuleHashPairList
= [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
2455 ModuleHashPair
= path
.join(FileDir
, self
.Name
+ ".ModuleHashPair")
2456 if not os
.path
.exists(ModuleHashPair
):
2457 EdkLogger
.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
2461 with
open(ModuleHashPair
, 'r') as f
:
2462 ModuleHashPairList
= json
.load(f
)
2464 EdkLogger
.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
2468 for idx
, (PreMakefileHash
, MakeHash
) in enumerate (ModuleHashPairList
):
2469 TargetHashDir
= path
.join(FileDir
, str(MakeHash
))
2470 if os
.path
.exists(TargetHashDir
):
2471 MakeHashSet
.add(MakeHash
)
2473 EdkLogger
.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
2476 TargetHash
= list(MakeHashSet
)[0]
2477 TargetHashDir
= path
.join(FileDir
, str(TargetHash
))
2478 if len(MakeHashSet
) > 1 :
2479 EdkLogger
.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash
, self
.MetaFile
.Path
, self
.Arch
))
2481 ListFile
= path
.join(TargetHashDir
, self
.Name
+ '.MakeHashChain')
2482 if os
.path
.exists(ListFile
):
2484 f
= open(ListFile
, 'r')
2485 CachedList
= json
.load(f
)
2488 EdkLogger
.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile
)
2491 EdkLogger
.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile
)
2494 CurrentList
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
2495 for idx
, (file, hash) in enumerate (CurrentList
):
2496 (filecached
, hashcached
) = CachedList
[idx
]
2497 if file != filecached
:
2498 EdkLogger
.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self
.MetaFile
.Path
, self
.Arch
, file, filecached
))
2500 if hash != hashcached
:
2501 EdkLogger
.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self
.MetaFile
.Path
, self
.Arch
, file))
2506 ## Decide whether we can skip the ModuleAutoGen process
2507 def CanSkipbyCache(self
, gDict
):
2508 # Hashing feature is off
2509 if not GlobalData
.gBinCacheSource
:
2512 if self
in GlobalData
.gBuildHashSkipTracking
:
2513 return GlobalData
.gBuildHashSkipTracking
[self
]
2515 # If library or Module is binary do not skip by hash
2516 if self
.IsBinaryModule
:
2517 GlobalData
.gBuildHashSkipTracking
[self
] = False
2520 # .inc is contains binary information so do not skip by hash as well
2521 for f_ext
in self
.SourceFileList
:
2522 if '.inc' in str(f_ext
):
2523 GlobalData
.gBuildHashSkipTracking
[self
] = False
2526 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
:
2529 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakeCacheHit
:
2530 GlobalData
.gBuildHashSkipTracking
[self
] = True
2533 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeCacheHit
:
2534 GlobalData
.gBuildHashSkipTracking
[self
] = True
2539 ## Decide whether we can skip the ModuleAutoGen process
2540 # If any source file is newer than the module than we cannot skip
2543 # Don't skip if cache feature enabled
2544 if GlobalData
.gUseHashCache
or GlobalData
.gBinCacheDest
or GlobalData
.gBinCacheSource
:
2546 if self
.MakeFileDir
in GlobalData
.gSikpAutoGenCache
:
2548 if not os
.path
.exists(self
.TimeStampPath
):
2550 #last creation time of the module
2551 DstTimeStamp
= os
.stat(self
.TimeStampPath
)[8]
2553 SrcTimeStamp
= self
.Workspace
._SrcTimeStamp
2554 if SrcTimeStamp
> DstTimeStamp
:
2557 with
open(self
.TimeStampPath
,'r') as f
:
2559 source
= source
.rstrip('\n')
2560 if not os
.path
.exists(source
):
2562 if source
not in ModuleAutoGen
.TimeDict
:
2563 ModuleAutoGen
.TimeDict
[source
] = os
.stat(source
)[8]
2564 if ModuleAutoGen
.TimeDict
[source
] > DstTimeStamp
:
2566 GlobalData
.gSikpAutoGenCache
.add(self
.MakeFileDir
)
def TimeStampPath(self):
    """Full path of the 'AutoGenTimeStamp' marker file in this module's Makefile directory.

    The file records when AutoGen last ran for this module; callers compare
    its mtime against source timestamps to decide whether AutoGen can be skipped.
    """
    stamp_dir = self.MakeFileDir
    return os.path.join(stamp_dir, 'AutoGenTimeStamp')