2 # Create makefile for MS nmake and GNU make
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
7 from __future__
import absolute_import
8 from AutoGen
.AutoGen
import AutoGen
9 from Common
.LongFilePathSupport
import CopyLongFilePath
10 from Common
.BuildToolError
import *
11 from Common
.DataType
import *
12 from Common
.Misc
import *
13 from Common
.StringUtils
import NormPath
,GetSplitList
14 from collections
import defaultdict
15 from Workspace
.WorkspaceCommon
import OrderedListDict
16 import os
.path
as path
19 from . import InfSectionParser
22 from . import GenDepex
23 from io
import BytesIO
24 from GenPatchPcdTable
.GenPatchPcdTable
import parsePcdInfoFromMapFile
25 from Workspace
.MetaFileCommentParser
import UsageList
26 from .GenPcdDb
import CreatePcdDatabaseCode
27 from Common
.caching
import cached_class_function
28 from AutoGen
.ModuleAutoGenHelper
import PlatformInfo
,WorkSpaceInfo
29 from AutoGen
.CacheIR
import ModuleBuildCacheIR
## Mapping Makefile type
# Maps a toolchain family name to the make flavor used for custom makefiles:
# MSFT toolchains use nmake, the GCC family uses gmake (see CustomMakefile).
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}

# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
# is the former use /I , the Latter used -I to specify include directories
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

## default file name for AutoGen
# The %(module_name)s placeholders are expanded with the module's name via the
# '%' operator (see the AutoGenFileList property below).
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
# Default INF specification version string (its consumer is outside the
# visible part of this file -- presumably used when emitting as-built INFs).
gInfSpecVersion = "0x00010017"
# Match name = variable
# NOTE: raw string literals are used so '\s' and '\w' reach the regex engine
# verbatim; in a non-raw literal these are invalid escape sequences
# (DeprecationWarning since Python 3.6, an error in newer versions).
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")

# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
62 # Template string to generic AsBuilt INF
64 gAsBuiltInfHeaderString
= TemplateString("""${header_comments}
70 INF_VERSION = ${module_inf_version}
71 BASE_NAME = ${module_name}
72 FILE_GUID = ${module_guid}
73 MODULE_TYPE = ${module_module_type}${BEGIN}
74 VERSION_STRING = ${module_version_string}${END}${BEGIN}
75 PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
76 UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
77 PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
78 ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
79 UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
80 CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
81 DESTRUCTOR = ${module_destructor}${END}${BEGIN}
82 SHADOW = ${module_shadow}${END}${BEGIN}
83 PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
84 PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
85 PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
86 PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
87 BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
88 SPEC = ${module_spec}${END}${BEGIN}
89 UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
90 MODULE_UNI_FILE = ${module_uni_file}${END}
92 [Packages.${module_arch}]${BEGIN}
95 [Binaries.${module_arch}]${BEGIN}
98 [PatchPcd.${module_arch}]${BEGIN}
102 [Protocols.${module_arch}]${BEGIN}
106 [Ppis.${module_arch}]${BEGIN}
110 [Guids.${module_arch}]${BEGIN}
114 [PcdEx.${module_arch}]${BEGIN}
118 [LibraryClasses.${module_arch}]
119 ## @LIB_INSTANCES${BEGIN}
120 # ${libraryclasses_item}${END}
124 ${userextension_tianocore_item}
128 [BuildOptions.${module_arch}]
130 ## ${flags_item}${END}
# extend lists contained in a dictionary with lists stored in another dictionary
# if CopyToDict is not derived from DefaultDict(list) then this may raise exception
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    """Append every list in CopyFromDict onto the matching list in CopyToDict.

    CopyToDict is expected to behave like defaultdict(list): a key present
    only in CopyFromDict must auto-create an empty list on first access,
    otherwise a KeyError is raised.
    """
    for Key, ValueList in CopyFromDict.items():
        CopyToDict[Key].extend(ValueList)
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    """Join *PathList* into one path, create that directory, return the path.

    Fix: the visible code computed the path and created the directory but
    never returned it, contradicting the comment above; the return is added
    so callers receive the full path.
    """
    RetVal = path.join(*PathList)
    CreateDirectory(RetVal)
    return RetVal
# Convert string to C format array
def _ConvertStringToByteArray(Value):
    # NOTE(review): this extraction is missing several of the function's
    # original lines (guard clauses, else-branches and return statements are
    # absent), so the control flow shown here is NOT complete -- verify every
    # branch against the upstream source before relying on it.
    Value = Value.strip()
    # Byte-array style input such as "{0x01, 0x02}":
    if not Value.endswith('}'):
        # (branch body missing from the extraction -- presumably an early
        #  error return for malformed input)
    # Normalize each field to a decimal string and re-wrap in braces.
    Value = Value.replace(' ', '').replace('{', '').replace('}', '')
    ValFields = Value.split(',')
    for Index in range(len(ValFields)):
        # int(..., 0) accepts 0x.. hex, octal and decimal spellings.
        ValFields[Index] = str(int(ValFields[Index], 0))
    Value = '{' + ','.join(ValFields) + '}'
    # (lines missing here -- upstream presumably returns the normalized array
    #  and only reaches the code below for quoted-string input)
    # Quoted-string input: L"..." (wide) or "..." (narrow).
    if Value.startswith('L"'):
        if not Value.endswith('"'):
            # (branch body missing from the extraction)
    elif not Value.startswith('"') or not Value.endswith('"'):
        # (branch body missing from the extraction)
    # SECURITY NOTE(review): eval() on the incoming string executes arbitrary
    # expressions; presumably tolerated because the input comes from build
    # metadata, not external users -- confirm upstream.
    Value = eval(Value)         # translate escape character
    for Index in range(0, len(Value)):
        # NOTE(review): the initialization of NewValue and the wide/narrow
        # branch structure are missing from the extraction.
        # Wide strings: each character emitted as a 16-bit value.
        NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
        # Narrow strings: each character emitted as an 8-bit value.
        NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
    # NUL-terminate the array and close the brace.
    Value = NewValue + '0}'
    # (return statement missing from the extraction)
185 ## ModuleAutoGen class
187 # This class encapsules the AutoGen behaviors for the build tools. In addition to
188 # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
189 # to the [depex] section in module's inf file.
191 class ModuleAutoGen(AutoGen
):
192 # call super().__init__ then call the worker function with different parameter count
193 def __init__(self
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
):
194 if not hasattr(self
, "_Init"):
195 self
._InitWorker
(Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
)
198 ## Cache the timestamps of metafiles of every module in a class attribute
202 def __new__(cls
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
):
203 # check if this module is employed by active platform
204 if not PlatformInfo(Workspace
, args
[0], Target
, Toolchain
, Arch
,args
[-1]).ValidModule(MetaFile
):
205 EdkLogger
.verbose("Module [%s] for [%s] is not employed by active platform\n" \
208 return super(ModuleAutoGen
, cls
).__new
__(cls
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
)
210 ## Initialize ModuleAutoGen
212 # @param Workspace EdkIIWorkspaceBuild object
213 # @param ModuleFile The path of module file
214 # @param Target Build target (DEBUG, RELEASE)
215 # @param Toolchain Name of tool chain
216 # @param Arch The arch the module supports
217 # @param PlatformFile Platform meta-file
219 def _InitWorker(self
, Workspace
, ModuleFile
, Target
, Toolchain
, Arch
, PlatformFile
,DataPipe
):
220 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "AutoGen module [%s] [%s]" % (ModuleFile
, Arch
))
221 GlobalData
.gProcessingFile
= "%s [%s, %s, %s]" % (ModuleFile
, Arch
, Toolchain
, Target
)
223 self
.Workspace
= Workspace
224 self
.WorkspaceDir
= ""
225 self
.PlatformInfo
= None
226 self
.DataPipe
= DataPipe
227 self
.__init
_platform
_info
__()
228 self
.MetaFile
= ModuleFile
229 self
.SourceDir
= self
.MetaFile
.SubDir
230 self
.SourceDir
= mws
.relpath(self
.SourceDir
, self
.WorkspaceDir
)
232 self
.ToolChain
= Toolchain
233 self
.BuildTarget
= Target
235 self
.ToolChainFamily
= self
.PlatformInfo
.ToolChainFamily
236 self
.BuildRuleFamily
= self
.PlatformInfo
.BuildRuleFamily
238 self
.IsCodeFileCreated
= False
239 self
.IsAsBuiltInfCreated
= False
240 self
.DepexGenerated
= False
242 self
.BuildDatabase
= self
.Workspace
.BuildDatabase
243 self
.BuildRuleOrder
= None
246 self
._GuidComments
= OrderedListDict()
247 self
._ProtocolComments
= OrderedListDict()
248 self
._PpiComments
= OrderedListDict()
249 self
._BuildTargets
= None
250 self
._IntroBuildTargetList
= None
251 self
._FinalBuildTargetList
= None
252 self
._FileTypes
= None
254 self
.AutoGenDepSet
= set()
255 self
.ReferenceModules
= []
258 self
.FileDependCache
= {}
260 def __init_platform_info__(self
):
261 pinfo
= self
.DataPipe
.Get("P_Info")
262 self
.WorkspaceDir
= pinfo
.get("WorkspaceDir")
263 self
.PlatformInfo
= PlatformInfo(self
.Workspace
,pinfo
.get("ActivePlatform"),pinfo
.get("Target"),pinfo
.get("ToolChain"),pinfo
.get("Arch"),self
.DataPipe
)
264 ## hash() operator of ModuleAutoGen
266 # The module file path and arch string will be used to represent
267 # hash value of this object
269 # @retval int Hash value of the module file path and arch
271 @cached_class_function
273 return hash((self
.MetaFile
, self
.Arch
))
275 return "%s [%s]" % (self
.MetaFile
, self
.Arch
)
277 # Get FixedAtBuild Pcds of this Module
279 def FixedAtBuildPcds(self
):
281 for Pcd
in self
.ModulePcdList
:
282 if Pcd
.Type
!= TAB_PCDS_FIXED_AT_BUILD
:
284 if Pcd
not in RetVal
:
289 def FixedVoidTypePcds(self
):
291 for Pcd
in self
.FixedAtBuildPcds
:
292 if Pcd
.DatumType
== TAB_VOID
:
293 if '.'.join((Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
)) not in RetVal
:
294 RetVal
['.'.join((Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
))] = Pcd
.DefaultValue
298 def UniqueBaseName(self
):
299 ModuleNames
= self
.DataPipe
.Get("M_Name")
302 return ModuleNames
.get((self
.Name
,self
.MetaFile
),self
.Name
)
304 # Macros could be used in build_rule.txt (also Makefile)
308 ("WORKSPACE" ,self
.WorkspaceDir
),
309 ("MODULE_NAME" ,self
.Name
),
310 ("MODULE_NAME_GUID" ,self
.UniqueBaseName
),
311 ("MODULE_GUID" ,self
.Guid
),
312 ("MODULE_VERSION" ,self
.Version
),
313 ("MODULE_TYPE" ,self
.ModuleType
),
314 ("MODULE_FILE" ,str(self
.MetaFile
)),
315 ("MODULE_FILE_BASE_NAME" ,self
.MetaFile
.BaseName
),
316 ("MODULE_RELATIVE_DIR" ,self
.SourceDir
),
317 ("MODULE_DIR" ,self
.SourceDir
),
318 ("BASE_NAME" ,self
.Name
),
320 ("TOOLCHAIN" ,self
.ToolChain
),
321 ("TOOLCHAIN_TAG" ,self
.ToolChain
),
322 ("TOOL_CHAIN_TAG" ,self
.ToolChain
),
323 ("TARGET" ,self
.BuildTarget
),
324 ("BUILD_DIR" ,self
.PlatformInfo
.BuildDir
),
325 ("BIN_DIR" ,os
.path
.join(self
.PlatformInfo
.BuildDir
, self
.Arch
)),
326 ("LIB_DIR" ,os
.path
.join(self
.PlatformInfo
.BuildDir
, self
.Arch
)),
327 ("MODULE_BUILD_DIR" ,self
.BuildDir
),
328 ("OUTPUT_DIR" ,self
.OutputDir
),
329 ("DEBUG_DIR" ,self
.DebugDir
),
330 ("DEST_DIR_OUTPUT" ,self
.OutputDir
),
331 ("DEST_DIR_DEBUG" ,self
.DebugDir
),
332 ("PLATFORM_NAME" ,self
.PlatformInfo
.Name
),
333 ("PLATFORM_GUID" ,self
.PlatformInfo
.Guid
),
334 ("PLATFORM_VERSION" ,self
.PlatformInfo
.Version
),
335 ("PLATFORM_RELATIVE_DIR" ,self
.PlatformInfo
.SourceDir
),
336 ("PLATFORM_DIR" ,mws
.join(self
.WorkspaceDir
, self
.PlatformInfo
.SourceDir
)),
337 ("PLATFORM_OUTPUT_DIR" ,self
.PlatformInfo
.OutputDir
),
338 ("FFS_OUTPUT_DIR" ,self
.FfsOutputDir
)
341 ## Return the module build data object
344 return self
.BuildDatabase
[self
.MetaFile
, self
.Arch
, self
.BuildTarget
, self
.ToolChain
]
346 ## Return the module name
349 return self
.Module
.BaseName
351 ## Return the module DxsFile if exist
354 return self
.Module
.DxsFile
356 ## Return the module meta-file GUID
360 # To build same module more than once, the module path with FILE_GUID overridden has
361 # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
362 # in DSC. The overridden GUID can be retrieved from file name
364 if os
.path
.basename(self
.MetaFile
.File
) != os
.path
.basename(self
.MetaFile
.Path
):
366 # Length of GUID is 36
368 return os
.path
.basename(self
.MetaFile
.Path
)[:36]
369 return self
.Module
.Guid
371 ## Return the module version
374 return self
.Module
.Version
376 ## Return the module type
378 def ModuleType(self
):
379 return self
.Module
.ModuleType
381 ## Return the component type (for Edk.x style of module)
383 def ComponentType(self
):
384 return self
.Module
.ComponentType
386 ## Return the build type
389 return self
.Module
.BuildType
391 ## Return the PCD_IS_DRIVER setting
393 def PcdIsDriver(self
):
394 return self
.Module
.PcdIsDriver
396 ## Return the autogen version, i.e. module meta-file version
398 def AutoGenVersion(self
):
399 return self
.Module
.AutoGenVersion
401 ## Check if the module is library or not
404 return bool(self
.Module
.LibraryClass
)
406 ## Check if the module is binary module or not
408 def IsBinaryModule(self
):
409 return self
.Module
.IsBinaryModule
411 ## Return the directory to store intermediate files of the module
415 self
.PlatformInfo
.BuildDir
,
418 self
.MetaFile
.BaseName
421 ## Return the directory to store the intermediate object files of the module
424 return _MakeDir((self
.BuildDir
, "OUTPUT"))
426 ## Return the directory path to store ffs file
428 def FfsOutputDir(self
):
429 if GlobalData
.gFdfParser
:
430 return path
.join(self
.PlatformInfo
.BuildDir
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
)
433 ## Return the directory to store auto-gened source files of the module
436 return _MakeDir((self
.BuildDir
, "DEBUG"))
438 ## Return the path of custom file
440 def CustomMakefile(self
):
442 for Type
in self
.Module
.CustomMakefile
:
443 MakeType
= gMakeTypeMap
[Type
] if Type
in gMakeTypeMap
else 'nmake'
444 File
= os
.path
.join(self
.SourceDir
, self
.Module
.CustomMakefile
[Type
])
445 RetVal
[MakeType
] = File
448 ## Return the directory of the makefile
450 # @retval string The directory string of module's makefile
453 def MakeFileDir(self
):
456 ## Return build command string
458 # @retval string Build command string
461 def BuildCommand(self
):
462 return self
.PlatformInfo
.BuildCommand
464 ## Get object list of all packages the module and its dependent libraries belong to
466 # @retval list The list of package object
469 def DerivedPackageList(self
):
471 for M
in [self
.Module
] + self
.DependentLibraryList
:
472 for Package
in M
.Packages
:
473 if Package
in PackageList
:
475 PackageList
.append(Package
)
478 ## Get the depex string
480 # @return : a string contain all depex expression.
481 def _GetDepexExpresionString(self
):
484 ## DPX_SOURCE IN Define section.
485 if self
.Module
.DxsFile
:
487 for M
in [self
.Module
] + self
.DependentLibraryList
:
488 Filename
= M
.MetaFile
.Path
489 InfObj
= InfSectionParser
.InfSectionParser(Filename
)
490 DepexExpressionList
= InfObj
.GetDepexExpresionList()
491 for DepexExpression
in DepexExpressionList
:
492 for key
in DepexExpression
:
493 Arch
, ModuleType
= key
494 DepexExpr
= [x
for x
in DepexExpression
[key
] if not str(x
).startswith('#')]
495 # the type of build module is USER_DEFINED.
496 # All different DEPEX section tags would be copied into the As Built INF file
497 # and there would be separate DEPEX section tags
498 if self
.ModuleType
.upper() == SUP_MODULE_USER_DEFINED
or self
.ModuleType
.upper() == SUP_MODULE_HOST_APPLICATION
:
499 if (Arch
.upper() == self
.Arch
.upper()) and (ModuleType
.upper() != TAB_ARCH_COMMON
):
500 DepexList
.append({(Arch
, ModuleType
): DepexExpr
})
502 if Arch
.upper() == TAB_ARCH_COMMON
or \
503 (Arch
.upper() == self
.Arch
.upper() and \
504 ModuleType
.upper() in [TAB_ARCH_COMMON
, self
.ModuleType
.upper()]):
505 DepexList
.append({(Arch
, ModuleType
): DepexExpr
})
507 #the type of build module is USER_DEFINED.
508 if self
.ModuleType
.upper() == SUP_MODULE_USER_DEFINED
or self
.ModuleType
.upper() == SUP_MODULE_HOST_APPLICATION
:
509 for Depex
in DepexList
:
511 DepexStr
+= '[Depex.%s.%s]\n' % key
512 DepexStr
+= '\n'.join('# '+ val
for val
in Depex
[key
])
515 return '[Depex.%s]\n' % self
.Arch
518 #the type of build module not is USER_DEFINED.
520 for Depex
in DepexList
:
525 for D
in Depex
.values():
526 DepexStr
+= ' '.join(val
for val
in D
)
527 Index
= DepexStr
.find('END')
528 if Index
> -1 and Index
== len(DepexStr
) - 3:
529 DepexStr
= DepexStr
[:-3]
530 DepexStr
= DepexStr
.strip()
533 DepexStr
= DepexStr
.lstrip('(').rstrip(')').strip()
535 return '[Depex.%s]\n' % self
.Arch
536 return '[Depex.%s]\n# ' % self
.Arch
+ DepexStr
538 ## Merge dependency expression
540 # @retval list The token list of the dependency expression after parsed
544 if self
.DxsFile
or self
.IsLibrary
or TAB_DEPENDENCY_EXPRESSION_FILE
in self
.FileTypes
:
549 # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
551 FixedVoidTypePcds
= {}
552 for M
in [self
] + self
.LibraryAutoGenList
:
553 FixedVoidTypePcds
.update(M
.FixedVoidTypePcds
)
554 for M
in [self
] + self
.LibraryAutoGenList
:
556 for D
in M
.Module
.Depex
[self
.Arch
, self
.ModuleType
]:
558 DepexList
.append('AND')
559 DepexList
.append('(')
560 #replace D with value if D is FixedAtBuild PCD
567 Value
= FixedVoidTypePcds
[item
]
568 if len(Value
.split(',')) != 16:
569 EdkLogger
.error("build", FORMAT_INVALID
,
570 "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item
))
571 NewList
.append(Value
)
573 EdkLogger
.error("build", FORMAT_INVALID
, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item
))
575 DepexList
.extend(NewList
)
576 if DepexList
[-1] == 'END': # no need of a END at this time
578 DepexList
.append(')')
581 EdkLogger
.verbose("DEPEX[%s] (+%s) = %s" % (self
.Name
, M
.Module
.BaseName
, DepexList
))
582 if 'BEFORE' in DepexList
or 'AFTER' in DepexList
:
584 if len(DepexList
) > 0:
585 EdkLogger
.verbose('')
586 return {self
.ModuleType
:DepexList
}
588 ## Merge dependency expression
590 # @retval list The token list of the dependency expression after parsed
593 def DepexExpressionDict(self
):
594 if self
.DxsFile
or self
.IsLibrary
or TAB_DEPENDENCY_EXPRESSION_FILE
in self
.FileTypes
:
597 DepexExpressionString
= ''
599 # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
601 for M
in [self
.Module
] + self
.DependentLibraryList
:
603 for D
in M
.DepexExpression
[self
.Arch
, self
.ModuleType
]:
604 if DepexExpressionString
!= '':
605 DepexExpressionString
+= ' AND '
606 DepexExpressionString
+= '('
607 DepexExpressionString
+= D
608 DepexExpressionString
= DepexExpressionString
.rstrip('END').strip()
609 DepexExpressionString
+= ')'
612 EdkLogger
.verbose("DEPEX[%s] (+%s) = %s" % (self
.Name
, M
.BaseName
, DepexExpressionString
))
613 if 'BEFORE' in DepexExpressionString
or 'AFTER' in DepexExpressionString
:
615 if len(DepexExpressionString
) > 0:
616 EdkLogger
.verbose('')
618 return {self
.ModuleType
:DepexExpressionString
}
620 # Get the tiano core user extension, it is contain dependent library.
621 # @retval: a list contain tiano core userextension.
623 def _GetTianoCoreUserExtensionList(self
):
624 TianoCoreUserExtentionList
= []
625 for M
in [self
.Module
] + self
.DependentLibraryList
:
626 Filename
= M
.MetaFile
.Path
627 InfObj
= InfSectionParser
.InfSectionParser(Filename
)
628 TianoCoreUserExtenList
= InfObj
.GetUserExtensionTianoCore()
629 for TianoCoreUserExtent
in TianoCoreUserExtenList
:
630 for Section
in TianoCoreUserExtent
:
631 ItemList
= Section
.split(TAB_SPLIT
)
633 if len(ItemList
) == 4:
635 if Arch
.upper() == TAB_ARCH_COMMON
or Arch
.upper() == self
.Arch
.upper():
637 TianoCoreList
.extend([TAB_SECTION_START
+ Section
+ TAB_SECTION_END
])
638 TianoCoreList
.extend(TianoCoreUserExtent
[Section
][:])
639 TianoCoreList
.append('\n')
640 TianoCoreUserExtentionList
.append(TianoCoreList
)
642 return TianoCoreUserExtentionList
644 ## Return the list of specification version required for the module
646 # @retval list The list of specification defined in module file
649 def Specification(self
):
650 return self
.Module
.Specification
652 ## Tool option for the module build
654 # @param PlatformInfo The object of PlatformBuildInfo
655 # @retval dict The dict containing valid options
658 def BuildOption(self
):
659 RetVal
, self
.BuildRuleOrder
= self
.PlatformInfo
.ApplyBuildOption(self
.Module
)
660 if self
.BuildRuleOrder
:
661 self
.BuildRuleOrder
= ['.%s' % Ext
for Ext
in self
.BuildRuleOrder
.split()]
664 ## Get include path list from tool option for the module build
666 # @retval list The include path list
669 def BuildOptionIncPathList(self
):
671 # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
672 # is the former use /I , the Latter used -I to specify include directories
674 if self
.PlatformInfo
.ToolChainFamily
in (TAB_COMPILER_MSFT
):
675 BuildOptIncludeRegEx
= gBuildOptIncludePatternMsft
676 elif self
.PlatformInfo
.ToolChainFamily
in ('INTEL', 'GCC', 'RVCT'):
677 BuildOptIncludeRegEx
= gBuildOptIncludePatternOther
680 # New ToolChainFamily, don't known whether there is option to specify include directories
685 for Tool
in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
687 FlagOption
= self
.BuildOption
[Tool
]['FLAGS']
691 if self
.ToolChainFamily
!= 'RVCT':
692 IncPathList
= [NormPath(Path
, self
.Macros
) for Path
in BuildOptIncludeRegEx
.findall(FlagOption
)]
695 # RVCT may specify a list of directory seperated by commas
698 for Path
in BuildOptIncludeRegEx
.findall(FlagOption
):
699 PathList
= GetSplitList(Path
, TAB_COMMA_SPLIT
)
700 IncPathList
.extend(NormPath(PathEntry
, self
.Macros
) for PathEntry
in PathList
)
703 # EDK II modules must not reference header files outside of the packages they depend on or
704 # within the module's directory tree. Report error if violation.
706 if GlobalData
.gDisableIncludePathCheck
== False:
707 for Path
in IncPathList
:
708 if (Path
not in self
.IncludePathList
) and (CommonPath([Path
, self
.MetaFile
.Dir
]) != self
.MetaFile
.Dir
):
709 ErrMsg
= "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path
, Tool
, FlagOption
)
710 EdkLogger
.error("build",
713 File
=str(self
.MetaFile
))
714 RetVal
+= IncPathList
717 ## Return a list of files which can be built from source
719 # What kind of files can be built is determined by build rules in
720 # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
723 def SourceFileList(self
):
725 ToolChainTagSet
= {"", TAB_STAR
, self
.ToolChain
}
726 ToolChainFamilySet
= {"", TAB_STAR
, self
.ToolChainFamily
, self
.BuildRuleFamily
}
727 for F
in self
.Module
.Sources
:
729 if F
.TagName
not in ToolChainTagSet
:
730 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "The toolchain [%s] for processing file [%s] is found, "
731 "but [%s] is currently used" % (F
.TagName
, str(F
), self
.ToolChain
))
733 # match tool chain family or build rule family
734 if F
.ToolChainFamily
not in ToolChainFamilySet
:
737 "The file [%s] must be built by tools of [%s], " \
738 "but current toolchain family is [%s], buildrule family is [%s]" \
739 % (str(F
), F
.ToolChainFamily
, self
.ToolChainFamily
, self
.BuildRuleFamily
))
742 # add the file path into search path list for file including
743 if F
.Dir
not in self
.IncludePathList
:
744 self
.IncludePathList
.insert(0, F
.Dir
)
747 self
._MatchBuildRuleOrder
(RetVal
)
750 self
._ApplyBuildRule
(F
, TAB_UNKNOWN_FILE
)
753 def _MatchBuildRuleOrder(self
, FileList
):
756 for SingleFile
in FileList
:
757 if self
.BuildRuleOrder
and SingleFile
.Ext
in self
.BuildRuleOrder
and SingleFile
.Ext
in self
.BuildRules
:
758 key
= SingleFile
.Path
.rsplit(SingleFile
.Ext
,1)[0]
759 if key
in Order_Dict
:
760 Order_Dict
[key
].append(SingleFile
.Ext
)
762 Order_Dict
[key
] = [SingleFile
.Ext
]
766 if len(Order_Dict
[F
]) > 1:
767 Order_Dict
[F
].sort(key
=lambda i
: self
.BuildRuleOrder
.index(i
))
768 for Ext
in Order_Dict
[F
][1:]:
769 RemoveList
.append(F
+ Ext
)
771 for item
in RemoveList
:
772 FileList
.remove(item
)
776 ## Return the list of unicode files
778 def UnicodeFileList(self
):
779 return self
.FileTypes
.get(TAB_UNICODE_FILE
,[])
781 ## Return the list of vfr files
783 def VfrFileList(self
):
784 return self
.FileTypes
.get(TAB_VFR_FILE
, [])
786 ## Return the list of Image Definition files
788 def IdfFileList(self
):
789 return self
.FileTypes
.get(TAB_IMAGE_FILE
,[])
791 ## Return a list of files which can be built from binary
793 # "Build" binary files are just to copy them to build directory.
795 # @retval list The list of files which can be built later
798 def BinaryFileList(self
):
800 for F
in self
.Module
.Binaries
:
801 if F
.Target
not in [TAB_ARCH_COMMON
, TAB_STAR
] and F
.Target
!= self
.BuildTarget
:
804 self
._ApplyBuildRule
(F
, F
.Type
, BinaryFileList
=RetVal
)
808 def BuildRules(self
):
810 BuildRuleDatabase
= self
.PlatformInfo
.BuildRule
811 for Type
in BuildRuleDatabase
.FileTypeList
:
812 #first try getting build rule by BuildRuleFamily
813 RuleObject
= BuildRuleDatabase
[Type
, self
.BuildType
, self
.Arch
, self
.BuildRuleFamily
]
815 # build type is always module type, but ...
816 if self
.ModuleType
!= self
.BuildType
:
817 RuleObject
= BuildRuleDatabase
[Type
, self
.ModuleType
, self
.Arch
, self
.BuildRuleFamily
]
818 #second try getting build rule by ToolChainFamily
820 RuleObject
= BuildRuleDatabase
[Type
, self
.BuildType
, self
.Arch
, self
.ToolChainFamily
]
822 # build type is always module type, but ...
823 if self
.ModuleType
!= self
.BuildType
:
824 RuleObject
= BuildRuleDatabase
[Type
, self
.ModuleType
, self
.Arch
, self
.ToolChainFamily
]
827 RuleObject
= RuleObject
.Instantiate(self
.Macros
)
828 RetVal
[Type
] = RuleObject
829 for Ext
in RuleObject
.SourceFileExtList
:
830 RetVal
[Ext
] = RuleObject
833 def _ApplyBuildRule(self
, File
, FileType
, BinaryFileList
=None):
834 if self
._BuildTargets
is None:
835 self
._IntroBuildTargetList
= set()
836 self
._FinalBuildTargetList
= set()
837 self
._BuildTargets
= defaultdict(set)
838 self
._FileTypes
= defaultdict(set)
840 if not BinaryFileList
:
841 BinaryFileList
= self
.BinaryFileList
843 SubDirectory
= os
.path
.join(self
.OutputDir
, File
.SubDir
)
844 if not os
.path
.exists(SubDirectory
):
845 CreateDirectory(SubDirectory
)
851 # Make sure to get build rule order value
855 while Index
< len(SourceList
):
856 Source
= SourceList
[Index
]
860 CreateDirectory(Source
.Dir
)
862 if File
.IsBinary
and File
== Source
and File
in BinaryFileList
:
863 # Skip all files that are not binary libraries
864 if not self
.IsLibrary
:
866 RuleObject
= self
.BuildRules
[TAB_DEFAULT_BINARY_FILE
]
867 elif FileType
in self
.BuildRules
:
868 RuleObject
= self
.BuildRules
[FileType
]
869 elif Source
.Ext
in self
.BuildRules
:
870 RuleObject
= self
.BuildRules
[Source
.Ext
]
872 # stop at no more rules
874 self
._FinalBuildTargetList
.add(LastTarget
)
877 FileType
= RuleObject
.SourceFileType
878 self
._FileTypes
[FileType
].add(Source
)
880 # stop at STATIC_LIBRARY for library
881 if self
.IsLibrary
and FileType
== TAB_STATIC_LIBRARY
:
883 self
._FinalBuildTargetList
.add(LastTarget
)
886 Target
= RuleObject
.Apply(Source
, self
.BuildRuleOrder
)
889 self
._FinalBuildTargetList
.add(LastTarget
)
891 elif not Target
.Outputs
:
892 # Only do build for target with outputs
893 self
._FinalBuildTargetList
.add(Target
)
895 self
._BuildTargets
[FileType
].add(Target
)
897 if not Source
.IsBinary
and Source
== File
:
898 self
._IntroBuildTargetList
.add(Target
)
900 # to avoid cyclic rule
901 if FileType
in RuleChain
:
904 RuleChain
.add(FileType
)
905 SourceList
.extend(Target
.Outputs
)
907 FileType
= TAB_UNKNOWN_FILE
911 if self
._BuildTargets
is None:
912 self
._IntroBuildTargetList
= set()
913 self
._FinalBuildTargetList
= set()
914 self
._BuildTargets
= defaultdict(set)
915 self
._FileTypes
= defaultdict(set)
917 #TRICK: call SourceFileList property to apply build rule for source files
920 #TRICK: call _GetBinaryFileList to apply build rule for binary files
923 return self
._BuildTargets
926 def IntroTargetList(self
):
928 return self
._IntroBuildTargetList
931 def CodaTargetList(self
):
933 return self
._FinalBuildTargetList
938 return self
._FileTypes
940 ## Get the list of package object the module depends on
942 # @retval list The package object list
945 def DependentPackageList(self
):
946 return self
.Module
.Packages
948 ## Return the list of auto-generated code file
950 # @retval list The list of auto-generated file
953 def AutoGenFileList(self
):
954 AutoGenUniIdf
= self
.BuildType
!= 'UEFI_HII'
955 UniStringBinBuffer
= BytesIO()
956 IdfGenBinBuffer
= BytesIO()
958 AutoGenC
= TemplateString()
959 AutoGenH
= TemplateString()
960 StringH
= TemplateString()
961 StringIdf
= TemplateString()
962 GenC
.CreateCode(self
, AutoGenC
, AutoGenH
, StringH
, AutoGenUniIdf
, UniStringBinBuffer
, StringIdf
, AutoGenUniIdf
, IdfGenBinBuffer
)
964 # AutoGen.c is generated if there are library classes in inf, or there are object files
966 if str(AutoGenC
) != "" and (len(self
.Module
.LibraryClasses
) > 0
967 or TAB_OBJECT_FILE
in self
.FileTypes
):
968 AutoFile
= PathClass(gAutoGenCodeFileName
, self
.DebugDir
)
969 RetVal
[AutoFile
] = str(AutoGenC
)
970 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
971 if str(AutoGenH
) != "":
972 AutoFile
= PathClass(gAutoGenHeaderFileName
, self
.DebugDir
)
973 RetVal
[AutoFile
] = str(AutoGenH
)
974 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
975 if str(StringH
) != "":
976 AutoFile
= PathClass(gAutoGenStringFileName
% {"module_name":self
.Name
}, self
.DebugDir
)
977 RetVal
[AutoFile
] = str(StringH
)
978 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
979 if UniStringBinBuffer
is not None and UniStringBinBuffer
.getvalue() != b
"":
980 AutoFile
= PathClass(gAutoGenStringFormFileName
% {"module_name":self
.Name
}, self
.OutputDir
)
981 RetVal
[AutoFile
] = UniStringBinBuffer
.getvalue()
982 AutoFile
.IsBinary
= True
983 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
984 if UniStringBinBuffer
is not None:
985 UniStringBinBuffer
.close()
986 if str(StringIdf
) != "":
987 AutoFile
= PathClass(gAutoGenImageDefFileName
% {"module_name":self
.Name
}, self
.DebugDir
)
988 RetVal
[AutoFile
] = str(StringIdf
)
989 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
990 if IdfGenBinBuffer
is not None and IdfGenBinBuffer
.getvalue() != b
"":
991 AutoFile
= PathClass(gAutoGenIdfFileName
% {"module_name":self
.Name
}, self
.OutputDir
)
992 RetVal
[AutoFile
] = IdfGenBinBuffer
.getvalue()
993 AutoFile
.IsBinary
= True
994 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
995 if IdfGenBinBuffer
is not None:
996 IdfGenBinBuffer
.close()
999 ## Return the list of library modules explicitly or implicitly used by this module
1001 def DependentLibraryList(self
):
1002 # only merge library classes and PCD for non-library module
1005 return self
.PlatformInfo
.ApplyLibraryInstance(self
.Module
)
1007 ## Get the list of PCDs from current module
1009 # @retval list The list of PCD
1012 def ModulePcdList(self
):
1013 # apply PCD settings from platform
1014 RetVal
= self
.PlatformInfo
.ApplyPcdSetting(self
.Module
, self
.Module
.Pcds
)
1018 def _PcdComments(self
):
1019 ReVal
= OrderedListDict()
1020 ExtendCopyDictionaryLists(ReVal
, self
.Module
.PcdComments
)
1021 if not self
.IsLibrary
:
1022 for Library
in self
.DependentLibraryList
:
1023 ExtendCopyDictionaryLists(ReVal
, Library
.PcdComments
)
1026 ## Get the list of PCDs from dependent libraries
1028 # @retval list The list of PCD
1031 def LibraryPcdList(self
):
1036 # get PCDs from dependent libraries
1037 for Library
in self
.DependentLibraryList
:
1038 PcdsInLibrary
= OrderedDict()
1039 for Key
in Library
.Pcds
:
1040 # skip duplicated PCDs
1041 if Key
in self
.Module
.Pcds
or Key
in Pcds
:
1044 PcdsInLibrary
[Key
] = copy
.copy(Library
.Pcds
[Key
])
1045 RetVal
.extend(self
.PlatformInfo
.ApplyPcdSetting(self
.Module
, PcdsInLibrary
, Library
=Library
))
1048 ## Get the GUID value mapping
1050 # @retval dict The mapping between GUID cname and its value
1054 RetVal
= self
.Module
.Guids
1055 for Library
in self
.DependentLibraryList
:
1056 RetVal
.update(Library
.Guids
)
1057 ExtendCopyDictionaryLists(self
._GuidComments
, Library
.GuidComments
)
1058 ExtendCopyDictionaryLists(self
._GuidComments
, self
.Module
.GuidComments
)
1062 def GetGuidsUsedByPcd(self
):
1063 RetVal
= OrderedDict(self
.Module
.GetGuidsUsedByPcd())
1064 for Library
in self
.DependentLibraryList
:
1065 RetVal
.update(Library
.GetGuidsUsedByPcd())
1067 ## Get the protocol value mapping
1069 # @retval dict The mapping between protocol cname and its value
1072 def ProtocolList(self
):
1073 RetVal
= OrderedDict(self
.Module
.Protocols
)
1074 for Library
in self
.DependentLibraryList
:
1075 RetVal
.update(Library
.Protocols
)
1076 ExtendCopyDictionaryLists(self
._ProtocolComments
, Library
.ProtocolComments
)
1077 ExtendCopyDictionaryLists(self
._ProtocolComments
, self
.Module
.ProtocolComments
)
1080 ## Get the PPI value mapping
1082 # @retval dict The mapping between PPI cname and its value
1086 RetVal
= OrderedDict(self
.Module
.Ppis
)
1087 for Library
in self
.DependentLibraryList
:
1088 RetVal
.update(Library
.Ppis
)
1089 ExtendCopyDictionaryLists(self
._PpiComments
, Library
.PpiComments
)
1090 ExtendCopyDictionaryLists(self
._PpiComments
, self
.Module
.PpiComments
)
1093 ## Get the list of include search path
1095 # @retval list The list path
1098 def IncludePathList(self
):
1100 RetVal
.append(self
.MetaFile
.Dir
)
1101 RetVal
.append(self
.DebugDir
)
1103 for Package
in self
.Module
.Packages
:
1104 PackageDir
= mws
.join(self
.WorkspaceDir
, Package
.MetaFile
.Dir
)
1105 if PackageDir
not in RetVal
:
1106 RetVal
.append(PackageDir
)
1107 IncludesList
= Package
.Includes
1108 if Package
._PrivateIncludes
:
1109 if not self
.MetaFile
.OriginalPath
.Path
.startswith(PackageDir
):
1110 IncludesList
= list(set(Package
.Includes
).difference(set(Package
._PrivateIncludes
)))
1111 for Inc
in IncludesList
:
1112 if Inc
not in RetVal
:
1113 RetVal
.append(str(Inc
))
1117 def IncludePathLength(self
):
1118 return sum(len(inc
)+1 for inc
in self
.IncludePathList
)
1120 ## Get the list of include paths from the packages
1122 # @IncludesList list The list path
1125 def PackageIncludePathList(self
):
1127 for Package
in self
.Module
.Packages
:
1128 PackageDir
= mws
.join(self
.WorkspaceDir
, Package
.MetaFile
.Dir
)
1129 IncludesList
= Package
.Includes
1130 if Package
._PrivateIncludes
:
1131 if not self
.MetaFile
.Path
.startswith(PackageDir
):
1132 IncludesList
= list(set(Package
.Includes
).difference(set(Package
._PrivateIncludes
)))
1135 ## Get HII EX PCDs which maybe used by VFR
1137 # efivarstore used by VFR may relate with HII EX PCDs
1138 # Get the variable name and GUID from efivarstore and HII EX PCD
1139 # List the HII EX PCDs in As Built INF if both name and GUID match.
1141 # @retval list HII EX PCDs
1143 def _GetPcdsMaybeUsedByVfr(self
):
1144 if not self
.SourceFileList
:
1148 for SrcFile
in self
.SourceFileList
:
1149 if SrcFile
.Ext
.lower() != '.vfr':
1151 Vfri
= os
.path
.join(self
.OutputDir
, SrcFile
.BaseName
+ '.i')
1152 if not os
.path
.exists(Vfri
):
1154 VfriFile
= open(Vfri
, 'r')
1155 Content
= VfriFile
.read()
1157 Pos
= Content
.find('efivarstore')
1160 # Make sure 'efivarstore' is the start of efivarstore statement
1161 # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
1164 while Index
>= 0 and Content
[Index
] in ' \t\r\n':
1166 if Index
>= 0 and Content
[Index
] != ';':
1167 Pos
= Content
.find('efivarstore', Pos
+ len('efivarstore'))
1170 # 'efivarstore' must be followed by name and guid
1172 Name
= gEfiVarStoreNamePattern
.search(Content
, Pos
)
1175 Guid
= gEfiVarStoreGuidPattern
.search(Content
, Pos
)
1178 NameArray
= _ConvertStringToByteArray('L"' + Name
.group(1) + '"')
1179 NameGuids
.add((NameArray
, GuidStructureStringToGuidString(Guid
.group(1))))
1180 Pos
= Content
.find('efivarstore', Name
.end())
1184 for Pcd
in self
.PlatformInfo
.Pcds
.values():
1185 if Pcd
.Type
!= TAB_PCDS_DYNAMIC_EX_HII
:
1187 for SkuInfo
in Pcd
.SkuInfoList
.values():
1188 Value
= GuidValue(SkuInfo
.VariableGuid
, self
.PlatformInfo
.PackageList
, self
.MetaFile
.Path
)
1191 Name
= _ConvertStringToByteArray(SkuInfo
.VariableName
)
1192 Guid
= GuidStructureStringToGuidString(Value
)
1193 if (Name
, Guid
) in NameGuids
and Pcd
not in HiiExPcds
:
1194 HiiExPcds
.append(Pcd
)
1199 def _GenOffsetBin(self
):
1201 for SourceFile
in self
.Module
.Sources
:
1202 if SourceFile
.Type
.upper() == ".VFR" :
1204 # search the .map file to find the offset of vfr binary in the PE32+/TE file.
1206 VfrUniBaseName
[SourceFile
.BaseName
] = (SourceFile
.BaseName
+ "Bin")
1207 elif SourceFile
.Type
.upper() == ".UNI" :
1209 # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
1211 VfrUniBaseName
["UniOffsetName"] = (self
.Name
+ "Strings")
1213 if not VfrUniBaseName
:
1215 MapFileName
= os
.path
.join(self
.OutputDir
, self
.Name
+ ".map")
1216 EfiFileName
= os
.path
.join(self
.OutputDir
, self
.Name
+ ".efi")
1217 VfrUniOffsetList
= GetVariableOffset(MapFileName
, EfiFileName
, list(VfrUniBaseName
.values()))
1218 if not VfrUniOffsetList
:
1221 OutputName
= '%sOffset.bin' % self
.Name
1222 UniVfrOffsetFileName
= os
.path
.join( self
.OutputDir
, OutputName
)
1225 fInputfile
= open(UniVfrOffsetFileName
, "wb+", 0)
1227 EdkLogger
.error("build", FILE_OPEN_FAILURE
, "File open failed for %s" % UniVfrOffsetFileName
, None)
1229 # Use a instance of BytesIO to cache data
1230 fStringIO
= BytesIO()
1232 for Item
in VfrUniOffsetList
:
1233 if (Item
[0].find("Strings") != -1):
1235 # UNI offset in image.
1237 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
1239 UniGuid
= b
'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
1240 fStringIO
.write(UniGuid
)
1241 UniValue
= pack ('Q', int (Item
[1], 16))
1242 fStringIO
.write (UniValue
)
1245 # VFR binary offset in image.
1247 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
1249 VfrGuid
= b
'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
1250 fStringIO
.write(VfrGuid
)
1251 VfrValue
= pack ('Q', int (Item
[1], 16))
1252 fStringIO
.write (VfrValue
)
1254 # write data into file.
1257 fInputfile
.write (fStringIO
.getvalue())
1259 EdkLogger
.error("build", FILE_WRITE_FAILURE
, "Write data to file %s failed, please check whether the "
1260 "file been locked or using by other applications." %UniVfrOffsetFileName
, None)
1267 def OutputFile(self
):
1269 OutputDir
= self
.OutputDir
.replace('\\', '/').strip('/')
1270 DebugDir
= self
.DebugDir
.replace('\\', '/').strip('/')
1271 FfsOutputDir
= self
.FfsOutputDir
.replace('\\', '/').rstrip('/')
1272 for Item
in self
.CodaTargetList
:
1273 File
= Item
.Target
.Path
.replace('\\', '/').strip('/').replace(DebugDir
, '').replace(OutputDir
, '').strip('/')
1275 if self
.DepexGenerated
:
1276 retVal
.add(self
.Name
+ '.depex')
1278 Bin
= self
._GenOffsetBin
()
1282 for Root
, Dirs
, Files
in os
.walk(OutputDir
):
1284 if File
.lower().endswith('.pdb'):
1287 for Root
, Dirs
, Files
in os
.walk(FfsOutputDir
):
1289 if File
.lower().endswith('.ffs') or File
.lower().endswith('.offset') or File
.lower().endswith('.raw') \
1290 or File
.lower().endswith('.raw.txt'):
1295 ## Create AsBuilt INF file the module
1297 def CreateAsBuiltInf(self
):
1299 if self
.IsAsBuiltInfCreated
:
1302 # Skip INF file generation for libraries
1306 # Skip the following code for modules with no source files
1307 if not self
.SourceFileList
:
1310 # Skip the following code for modules without any binary files
1311 if self
.BinaryFileList
:
1314 ### TODO: How to handles mixed source and binary modules
1316 # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
1317 # Also find all packages that the DynamicEx PCDs depend on
1322 PcdTokenSpaceList
= []
1323 for Pcd
in self
.ModulePcdList
+ self
.LibraryPcdList
:
1324 if Pcd
.Type
== TAB_PCDS_PATCHABLE_IN_MODULE
:
1325 PatchablePcds
.append(Pcd
)
1326 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_PATCHABLE_IN_MODULE
))
1327 elif Pcd
.Type
in PCD_DYNAMIC_EX_TYPE_SET
:
1330 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC_EX
))
1331 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC
))
1332 PcdTokenSpaceList
.append(Pcd
.TokenSpaceGuidCName
)
1333 GuidList
= OrderedDict(self
.GuidList
)
1334 for TokenSpace
in self
.GetGuidsUsedByPcd
:
1335 # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
1336 # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
1337 if TokenSpace
not in PcdTokenSpaceList
and TokenSpace
in GuidList
:
1338 GuidList
.pop(TokenSpace
)
1339 CheckList
= (GuidList
, self
.PpiList
, self
.ProtocolList
, PcdCheckList
)
1340 for Package
in self
.DerivedPackageList
:
1341 if Package
in Packages
:
1343 BeChecked
= (Package
.Guids
, Package
.Ppis
, Package
.Protocols
, Package
.Pcds
)
1345 for Index
in range(len(BeChecked
)):
1346 for Item
in CheckList
[Index
]:
1347 if Item
in BeChecked
[Index
]:
1348 Packages
.append(Package
)
1354 VfrPcds
= self
._GetPcdsMaybeUsedByVfr
()
1355 for Pkg
in self
.PlatformInfo
.PackageList
:
1358 for VfrPcd
in VfrPcds
:
1359 if ((VfrPcd
.TokenCName
, VfrPcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC_EX
) in Pkg
.Pcds
or
1360 (VfrPcd
.TokenCName
, VfrPcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC
) in Pkg
.Pcds
):
1361 Packages
.append(Pkg
)
1364 ModuleType
= SUP_MODULE_DXE_DRIVER
if self
.ModuleType
== SUP_MODULE_UEFI_DRIVER
and self
.DepexGenerated
else self
.ModuleType
1365 DriverType
= self
.PcdIsDriver
if self
.PcdIsDriver
else ''
1367 MDefs
= self
.Module
.Defines
1370 'module_name' : self
.Name
,
1371 'module_guid' : Guid
,
1372 'module_module_type' : ModuleType
,
1373 'module_version_string' : [MDefs
['VERSION_STRING']] if 'VERSION_STRING' in MDefs
else [],
1374 'pcd_is_driver_string' : [],
1375 'module_uefi_specification_version' : [],
1376 'module_pi_specification_version' : [],
1377 'module_entry_point' : self
.Module
.ModuleEntryPointList
,
1378 'module_unload_image' : self
.Module
.ModuleUnloadImageList
,
1379 'module_constructor' : self
.Module
.ConstructorList
,
1380 'module_destructor' : self
.Module
.DestructorList
,
1381 'module_shadow' : [MDefs
['SHADOW']] if 'SHADOW' in MDefs
else [],
1382 'module_pci_vendor_id' : [MDefs
['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs
else [],
1383 'module_pci_device_id' : [MDefs
['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs
else [],
1384 'module_pci_class_code' : [MDefs
['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs
else [],
1385 'module_pci_revision' : [MDefs
['PCI_REVISION']] if 'PCI_REVISION' in MDefs
else [],
1386 'module_build_number' : [MDefs
['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs
else [],
1387 'module_spec' : [MDefs
['SPEC']] if 'SPEC' in MDefs
else [],
1388 'module_uefi_hii_resource_section' : [MDefs
['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs
else [],
1389 'module_uni_file' : [MDefs
['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs
else [],
1390 'module_arch' : self
.Arch
,
1391 'package_item' : [Package
.MetaFile
.File
.replace('\\', '/') for Package
in Packages
],
1393 'patchablepcd_item' : [],
1395 'protocol_item' : [],
1399 'libraryclasses_item' : []
1402 if 'MODULE_UNI_FILE' in MDefs
:
1403 UNIFile
= os
.path
.join(self
.MetaFile
.Dir
, MDefs
['MODULE_UNI_FILE'])
1404 if os
.path
.isfile(UNIFile
):
1405 shutil
.copy2(UNIFile
, self
.OutputDir
)
1407 if self
.AutoGenVersion
> int(gInfSpecVersion
, 0):
1408 AsBuiltInfDict
['module_inf_version'] = '0x%08x' % self
.AutoGenVersion
1410 AsBuiltInfDict
['module_inf_version'] = gInfSpecVersion
1413 AsBuiltInfDict
['pcd_is_driver_string'].append(DriverType
)
1415 if 'UEFI_SPECIFICATION_VERSION' in self
.Specification
:
1416 AsBuiltInfDict
['module_uefi_specification_version'].append(self
.Specification
['UEFI_SPECIFICATION_VERSION'])
1417 if 'PI_SPECIFICATION_VERSION' in self
.Specification
:
1418 AsBuiltInfDict
['module_pi_specification_version'].append(self
.Specification
['PI_SPECIFICATION_VERSION'])
1420 OutputDir
= self
.OutputDir
.replace('\\', '/').strip('/')
1421 DebugDir
= self
.DebugDir
.replace('\\', '/').strip('/')
1422 for Item
in self
.CodaTargetList
:
1423 File
= Item
.Target
.Path
.replace('\\', '/').strip('/').replace(DebugDir
, '').replace(OutputDir
, '').strip('/')
1424 if os
.path
.isabs(File
):
1425 File
= File
.replace('\\', '/').strip('/').replace(OutputDir
, '').strip('/')
1426 if Item
.Target
.Ext
.lower() == '.aml':
1427 AsBuiltInfDict
['binary_item'].append('ASL|' + File
)
1428 elif Item
.Target
.Ext
.lower() == '.acpi':
1429 AsBuiltInfDict
['binary_item'].append('ACPI|' + File
)
1430 elif Item
.Target
.Ext
.lower() == '.efi':
1431 AsBuiltInfDict
['binary_item'].append('PE32|' + self
.Name
+ '.efi')
1433 AsBuiltInfDict
['binary_item'].append('BIN|' + File
)
1434 if not self
.DepexGenerated
:
1435 DepexFile
= os
.path
.join(self
.OutputDir
, self
.Name
+ '.depex')
1436 if os
.path
.exists(DepexFile
):
1437 self
.DepexGenerated
= True
1438 if self
.DepexGenerated
:
1439 if self
.ModuleType
in [SUP_MODULE_PEIM
]:
1440 AsBuiltInfDict
['binary_item'].append('PEI_DEPEX|' + self
.Name
+ '.depex')
1441 elif self
.ModuleType
in [SUP_MODULE_DXE_DRIVER
, SUP_MODULE_DXE_RUNTIME_DRIVER
, SUP_MODULE_DXE_SAL_DRIVER
, SUP_MODULE_UEFI_DRIVER
]:
1442 AsBuiltInfDict
['binary_item'].append('DXE_DEPEX|' + self
.Name
+ '.depex')
1443 elif self
.ModuleType
in [SUP_MODULE_DXE_SMM_DRIVER
]:
1444 AsBuiltInfDict
['binary_item'].append('SMM_DEPEX|' + self
.Name
+ '.depex')
1446 Bin
= self
._GenOffsetBin
()
1448 AsBuiltInfDict
['binary_item'].append('BIN|%s' % Bin
)
1450 for Root
, Dirs
, Files
in os
.walk(OutputDir
):
1452 if File
.lower().endswith('.pdb'):
1453 AsBuiltInfDict
['binary_item'].append('DISPOSABLE|' + File
)
1454 HeaderComments
= self
.Module
.HeaderComments
1456 for Index
in range(len(HeaderComments
)):
1457 if HeaderComments
[Index
].find('@BinaryHeader') != -1:
1458 HeaderComments
[Index
] = HeaderComments
[Index
].replace('@BinaryHeader', '@file')
1461 AsBuiltInfDict
['header_comments'] = '\n'.join(HeaderComments
[StartPos
:]).replace(':#', '://')
1462 AsBuiltInfDict
['tail_comments'] = '\n'.join(self
.Module
.TailComments
)
1465 (self
.ProtocolList
, self
._ProtocolComments
, 'protocol_item'),
1466 (self
.PpiList
, self
._PpiComments
, 'ppi_item'),
1467 (GuidList
, self
._GuidComments
, 'guid_item')
1469 for Item
in GenList
:
1470 for CName
in Item
[0]:
1471 Comments
= '\n '.join(Item
[1][CName
]) if CName
in Item
[1] else ''
1472 Entry
= Comments
+ '\n ' + CName
if Comments
else CName
1473 AsBuiltInfDict
[Item
[2]].append(Entry
)
1474 PatchList
= parsePcdInfoFromMapFile(
1475 os
.path
.join(self
.OutputDir
, self
.Name
+ '.map'),
1476 os
.path
.join(self
.OutputDir
, self
.Name
+ '.efi')
1479 for Pcd
in PatchablePcds
:
1480 TokenCName
= Pcd
.TokenCName
1481 for PcdItem
in GlobalData
.MixedPcd
:
1482 if (Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
) in GlobalData
.MixedPcd
[PcdItem
]:
1483 TokenCName
= PcdItem
[0]
1485 for PatchPcd
in PatchList
:
1486 if TokenCName
== PatchPcd
[0]:
1491 if Pcd
.DatumType
== 'BOOLEAN':
1492 BoolValue
= Pcd
.DefaultValue
.upper()
1493 if BoolValue
== 'TRUE':
1494 Pcd
.DefaultValue
= '1'
1495 elif BoolValue
== 'FALSE':
1496 Pcd
.DefaultValue
= '0'
1498 if Pcd
.DatumType
in TAB_PCD_NUMERIC_TYPES
:
1499 HexFormat
= '0x%02x'
1500 if Pcd
.DatumType
== TAB_UINT16
:
1501 HexFormat
= '0x%04x'
1502 elif Pcd
.DatumType
== TAB_UINT32
:
1503 HexFormat
= '0x%08x'
1504 elif Pcd
.DatumType
== TAB_UINT64
:
1505 HexFormat
= '0x%016x'
1506 PcdValue
= HexFormat
% int(Pcd
.DefaultValue
, 0)
1508 if Pcd
.MaxDatumSize
is None or Pcd
.MaxDatumSize
== '':
1509 EdkLogger
.error("build", AUTOGEN_ERROR
,
1510 "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1512 ArraySize
= int(Pcd
.MaxDatumSize
, 0)
1513 PcdValue
= Pcd
.DefaultValue
1514 if PcdValue
[0] != '{':
1516 if PcdValue
[0] == 'L':
1518 PcdValue
= PcdValue
.lstrip('L')
1519 PcdValue
= eval(PcdValue
)
1521 for Index
in range(0, len(PcdValue
)):
1523 CharVal
= ord(PcdValue
[Index
])
1524 NewValue
= NewValue
+ '0x%02x' % (CharVal
& 0x00FF) + ', ' \
1525 + '0x%02x' % (CharVal
>> 8) + ', '
1527 NewValue
= NewValue
+ '0x%02x' % (ord(PcdValue
[Index
]) % 0x100) + ', '
1530 Padding
= Padding
* 2
1531 ArraySize
= ArraySize
// 2
1532 if ArraySize
< (len(PcdValue
) + 1):
1533 if Pcd
.MaxSizeUserSet
:
1534 EdkLogger
.error("build", AUTOGEN_ERROR
,
1535 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1538 ArraySize
= len(PcdValue
) + 1
1539 if ArraySize
> len(PcdValue
) + 1:
1540 NewValue
= NewValue
+ Padding
* (ArraySize
- len(PcdValue
) - 1)
1541 PcdValue
= NewValue
+ Padding
.strip().rstrip(',') + '}'
1542 elif len(PcdValue
.split(',')) <= ArraySize
:
1543 PcdValue
= PcdValue
.rstrip('}') + ', 0x00' * (ArraySize
- len(PcdValue
.split(',')))
1546 if Pcd
.MaxSizeUserSet
:
1547 EdkLogger
.error("build", AUTOGEN_ERROR
,
1548 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1551 ArraySize
= len(PcdValue
) + 1
1552 PcdItem
= '%s.%s|%s|0x%X' % \
1553 (Pcd
.TokenSpaceGuidCName
, TokenCName
, PcdValue
, PatchPcd
[1])
1555 if (Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
) in self
._PcdComments
:
1556 PcdComments
= '\n '.join(self
._PcdComments
[Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
])
1558 PcdItem
= PcdComments
+ '\n ' + PcdItem
1559 AsBuiltInfDict
['patchablepcd_item'].append(PcdItem
)
1561 for Pcd
in Pcds
+ VfrPcds
:
1564 TokenCName
= Pcd
.TokenCName
1565 for PcdItem
in GlobalData
.MixedPcd
:
1566 if (Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
) in GlobalData
.MixedPcd
[PcdItem
]:
1567 TokenCName
= PcdItem
[0]
1569 if Pcd
.Type
== TAB_PCDS_DYNAMIC_EX_HII
:
1570 for SkuName
in Pcd
.SkuInfoList
:
1571 SkuInfo
= Pcd
.SkuInfoList
[SkuName
]
1572 HiiInfo
= '## %s|%s|%s' % (SkuInfo
.VariableName
, SkuInfo
.VariableGuid
, SkuInfo
.VariableOffset
)
1574 if (Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
) in self
._PcdComments
:
1575 PcdCommentList
= self
._PcdComments
[Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
][:]
1579 for Index
, Comment
in enumerate(PcdCommentList
):
1580 for Usage
in UsageList
:
1581 if Comment
.find(Usage
) != -1:
1585 if UsageIndex
!= -1:
1586 PcdCommentList
[UsageIndex
] = '## %s %s %s' % (UsageStr
, HiiInfo
, PcdCommentList
[UsageIndex
].replace(UsageStr
, ''))
1588 PcdCommentList
.append('## UNDEFINED ' + HiiInfo
)
1589 PcdComments
= '\n '.join(PcdCommentList
)
1590 PcdEntry
= Pcd
.TokenSpaceGuidCName
+ '.' + TokenCName
1592 PcdEntry
= PcdComments
+ '\n ' + PcdEntry
1593 AsBuiltInfDict
['pcd_item'].append(PcdEntry
)
1594 for Item
in self
.BuildOption
:
1595 if 'FLAGS' in self
.BuildOption
[Item
]:
1596 AsBuiltInfDict
['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self
.ToolChainFamily
, self
.BuildTarget
, self
.ToolChain
, self
.Arch
, Item
, self
.BuildOption
[Item
]['FLAGS'].strip()))
1598 # Generated LibraryClasses section in comments.
1599 for Library
in self
.LibraryAutoGenList
:
1600 AsBuiltInfDict
['libraryclasses_item'].append(Library
.MetaFile
.File
.replace('\\', '/'))
1602 # Generated UserExtensions TianoCore section.
1603 # All tianocore user extensions are copied.
1605 for TianoCore
in self
._GetTianoCoreUserExtensionList
():
1606 UserExtStr
+= '\n'.join(TianoCore
)
1607 ExtensionFile
= os
.path
.join(self
.MetaFile
.Dir
, TianoCore
[1])
1608 if os
.path
.isfile(ExtensionFile
):
1609 shutil
.copy2(ExtensionFile
, self
.OutputDir
)
1610 AsBuiltInfDict
['userextension_tianocore_item'] = UserExtStr
1612 # Generated depex expression section in comments.
1613 DepexExpression
= self
._GetDepexExpresionString
()
1614 AsBuiltInfDict
['depexsection_item'] = DepexExpression
if DepexExpression
else ''
1616 AsBuiltInf
= TemplateString()
1617 AsBuiltInf
.Append(gAsBuiltInfHeaderString
.Replace(AsBuiltInfDict
))
1619 SaveFileOnChange(os
.path
.join(self
.OutputDir
, self
.Name
+ '.inf'), str(AsBuiltInf
), False)
1621 self
.IsAsBuiltInfCreated
= True
1623 def CacheCopyFile(self
, OriginDir
, CopyDir
, File
):
1624 sub_dir
= os
.path
.relpath(File
, CopyDir
)
1625 destination_file
= os
.path
.join(OriginDir
, sub_dir
)
1626 destination_dir
= os
.path
.dirname(destination_file
)
1627 CreateDirectory(destination_dir
)
1629 CopyFileOnChange(File
, destination_dir
)
1631 EdkLogger
.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File
, destination_dir
))
1634 def CopyModuleToCache(self
):
1635 self
.GenPreMakefileHash(GlobalData
.gCacheIR
)
1636 if not (self
.MetaFile
.Path
, self
.Arch
) in GlobalData
.gCacheIR
or \
1637 not GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
:
1638 EdkLogger
.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1641 self
.GenMakeHash(GlobalData
.gCacheIR
)
1642 if not (self
.MetaFile
.Path
, self
.Arch
) in GlobalData
.gCacheIR
or \
1643 not GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
or \
1644 not GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
:
1645 EdkLogger
.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1648 MakeHashStr
= str(GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
)
1649 FileDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
, MakeHashStr
)
1650 FfsDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
, MakeHashStr
)
1652 CreateDirectory (FileDir
)
1653 self
.SaveHashChainFileToCache(GlobalData
.gCacheIR
)
1654 ModuleFile
= path
.join(self
.OutputDir
, self
.Name
+ '.inf')
1655 if os
.path
.exists(ModuleFile
):
1656 CopyFileOnChange(ModuleFile
, FileDir
)
1657 if not self
.OutputFile
:
1658 Ma
= self
.BuildDatabase
[self
.MetaFile
, self
.Arch
, self
.BuildTarget
, self
.ToolChain
]
1659 self
.OutputFile
= Ma
.Binaries
1660 for File
in self
.OutputFile
:
1662 if not os
.path
.isabs(File
):
1663 NewFile
= os
.path
.join(self
.OutputDir
, File
)
1664 if not os
.path
.exists(NewFile
):
1665 NewFile
= os
.path
.join(self
.FfsOutputDir
, File
)
1667 if os
.path
.exists(File
):
1668 if File
.lower().endswith('.ffs') or File
.lower().endswith('.offset') or File
.lower().endswith('.raw') \
1669 or File
.lower().endswith('.raw.txt'):
1670 self
.CacheCopyFile(FfsDir
, self
.FfsOutputDir
, File
)
1672 self
.CacheCopyFile(FileDir
, self
.OutputDir
, File
)
1674 def SaveHashChainFileToCache(self
, gDict
):
1675 if not GlobalData
.gBinCacheDest
:
1678 self
.GenPreMakefileHash(gDict
)
1679 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
1680 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
:
1681 EdkLogger
.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1684 self
.GenMakeHash(gDict
)
1685 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
1686 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
or \
1687 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
:
1688 EdkLogger
.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1691 # save the hash chain list as cache file
1692 MakeHashStr
= str(GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
)
1693 CacheDestDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
)
1694 CacheHashDestDir
= path
.join(CacheDestDir
, MakeHashStr
)
1695 ModuleHashPair
= path
.join(CacheDestDir
, self
.Name
+ ".ModuleHashPair")
1696 MakeHashChain
= path
.join(CacheHashDestDir
, self
.Name
+ ".MakeHashChain")
1697 ModuleFilesChain
= path
.join(CacheHashDestDir
, self
.Name
+ ".ModuleFilesChain")
1699 # save the HashChainDict as json file
1700 CreateDirectory (CacheDestDir
)
1701 CreateDirectory (CacheHashDestDir
)
1703 ModuleHashPairList
= [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
1704 if os
.path
.exists(ModuleHashPair
):
1705 f
= open(ModuleHashPair
, 'r')
1706 ModuleHashPairList
= json
.load(f
)
1708 PreMakeHash
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
1709 MakeHash
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
1710 ModuleHashPairList
.append((PreMakeHash
, MakeHash
))
1711 ModuleHashPairList
= list(set(map(tuple, ModuleHashPairList
)))
1712 with
open(ModuleHashPair
, 'w') as f
:
1713 json
.dump(ModuleHashPairList
, f
, indent
=2)
1715 EdkLogger
.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair
)
1719 with
open(MakeHashChain
, 'w') as f
:
1720 json
.dump(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
, f
, indent
=2)
1722 EdkLogger
.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain
)
1726 with
open(ModuleFilesChain
, 'w') as f
:
1727 json
.dump(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesChain
, f
, indent
=2)
1729 EdkLogger
.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain
)
1732 # save the autogenfile and makefile for debug usage
1733 CacheDebugDir
= path
.join(CacheHashDestDir
, "CacheDebug")
1734 CreateDirectory (CacheDebugDir
)
1735 CopyFileOnChange(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakefilePath
, CacheDebugDir
)
1736 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].AutoGenFileList
:
1737 for File
in gDict
[(self
.MetaFile
.Path
, self
.Arch
)].AutoGenFileList
:
1738 CopyFileOnChange(str(File
), CacheDebugDir
)
1742 ## Create makefile for the module and its dependent libraries
1744 # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of
1745 # dependent libraries will be created
1747 @cached_class_function
1748 def CreateMakeFile(self
, CreateLibraryMakeFile
=True, GenFfsList
= []):
1749 gDict
= GlobalData
.gCacheIR
1750 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
1751 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateMakeFileDone
:
1754 # nest this function inside it's only caller.
1755 def CreateTimeStamp():
1756 FileSet
= {self
.MetaFile
.Path
}
1758 for SourceFile
in self
.Module
.Sources
:
1759 FileSet
.add (SourceFile
.Path
)
1761 for Lib
in self
.DependentLibraryList
:
1762 FileSet
.add (Lib
.MetaFile
.Path
)
1764 for f
in self
.AutoGenDepSet
:
1765 FileSet
.add (f
.Path
)
1767 if os
.path
.exists (self
.TimeStampPath
):
1768 os
.remove (self
.TimeStampPath
)
1769 with
open(self
.TimeStampPath
, 'w+') as fd
:
1774 # Ignore generating makefile when it is a binary module
1775 if self
.IsBinaryModule
:
1778 self
.GenFfsList
= GenFfsList
1780 if not self
.IsLibrary
and CreateLibraryMakeFile
:
1781 for LibraryAutoGen
in self
.LibraryAutoGenList
:
1782 LibraryAutoGen
.CreateMakeFile()
1784 # CanSkip uses timestamps to determine build skipping
1788 if len(self
.CustomMakefile
) == 0:
1789 Makefile
= GenMake
.ModuleMakefile(self
)
1791 Makefile
= GenMake
.CustomMakefile(self
)
1792 if Makefile
.Generate():
1793 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated makefile for module %s [%s]" %
1794 (self
.Name
, self
.Arch
))
1796 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Skipped the generation of makefile for module %s [%s]" %
1797 (self
.Name
, self
.Arch
))
1801 MakefileType
= Makefile
._FileType
1802 MakefileName
= Makefile
._FILE
_NAME
_[MakefileType
]
1803 MakefilePath
= os
.path
.join(self
.MakeFileDir
, MakefileName
)
1805 MewIR
= ModuleBuildCacheIR(self
.MetaFile
.Path
, self
.Arch
)
1806 MewIR
.MakefilePath
= MakefilePath
1807 MewIR
.DependencyHeaderFileSet
= Makefile
.DependencyHeaderFileSet
1808 MewIR
.CreateMakeFileDone
= True
1809 with GlobalData
.file_lock
:
1811 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
1812 IR
.MakefilePath
= MakefilePath
1813 IR
.DependencyHeaderFileSet
= Makefile
.DependencyHeaderFileSet
1814 IR
.CreateMakeFileDone
= True
1815 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
1817 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = MewIR
1819 def CopyBinaryFiles(self
):
1820 for File
in self
.Module
.Binaries
:
1822 DstPath
= os
.path
.join(self
.OutputDir
, os
.path
.basename(SrcPath
))
1823 CopyLongFilePath(SrcPath
, DstPath
)
1824 ## Create autogen code for the module and its dependent libraries
1826 # @param CreateLibraryCodeFile Flag indicating if or not the code of
1827 # dependent libraries will be created
1829 def CreateCodeFile(self
, CreateLibraryCodeFile
=True):
1830 gDict
= GlobalData
.gCacheIR
1831 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
1832 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateCodeFileDone
:
1835 if self
.IsCodeFileCreated
:
1838 # Need to generate PcdDatabase even PcdDriver is binarymodule
1839 if self
.IsBinaryModule
and self
.PcdIsDriver
!= '':
1840 CreatePcdDatabaseCode(self
, TemplateString(), TemplateString())
1842 if self
.IsBinaryModule
:
1844 self
.CopyBinaryFiles()
1847 if not self
.IsLibrary
and CreateLibraryCodeFile
:
1848 for LibraryAutoGen
in self
.LibraryAutoGenList
:
1849 LibraryAutoGen
.CreateCodeFile()
1851 # CanSkip uses timestamps to determine build skipping
1856 IgoredAutoGenList
= []
1858 for File
in self
.AutoGenFileList
:
1859 if GenC
.Generate(File
.Path
, self
.AutoGenFileList
[File
], File
.IsBinary
):
1860 AutoGenList
.append(str(File
))
1862 IgoredAutoGenList
.append(str(File
))
1865 for ModuleType
in self
.DepexList
:
1866 # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
1867 if len(self
.DepexList
[ModuleType
]) == 0 or ModuleType
== SUP_MODULE_USER_DEFINED
or ModuleType
== SUP_MODULE_HOST_APPLICATION
:
1870 Dpx
= GenDepex
.DependencyExpression(self
.DepexList
[ModuleType
], ModuleType
, True)
1871 DpxFile
= gAutoGenDepexFileName
% {"module_name" : self
.Name
}
1873 if len(Dpx
.PostfixNotation
) != 0:
1874 self
.DepexGenerated
= True
1876 if Dpx
.Generate(path
.join(self
.OutputDir
, DpxFile
)):
1877 AutoGenList
.append(str(DpxFile
))
1879 IgoredAutoGenList
.append(str(DpxFile
))
1881 if IgoredAutoGenList
== []:
1882 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated [%s] files for module %s [%s]" %
1883 (" ".join(AutoGenList
), self
.Name
, self
.Arch
))
1884 elif AutoGenList
== []:
1885 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Skipped the generation of [%s] files for module %s [%s]" %
1886 (" ".join(IgoredAutoGenList
), self
.Name
, self
.Arch
))
1888 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated [%s] (skipped %s) files for module %s [%s]" %
1889 (" ".join(AutoGenList
), " ".join(IgoredAutoGenList
), self
.Name
, self
.Arch
))
1891 self
.IsCodeFileCreated
= True
1892 MewIR
= ModuleBuildCacheIR(self
.MetaFile
.Path
, self
.Arch
)
1893 MewIR
.CreateCodeFileDone
= True
1894 with GlobalData
.file_lock
:
1896 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
1897 IR
.CreateCodeFileDone
= True
1898 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
1900 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = MewIR
1904 ## Summarize the ModuleAutoGen objects of all libraries used by this module
1906 def LibraryAutoGenList(self
):
1908 for Library
in self
.DependentLibraryList
:
1915 self
.PlatformInfo
.MetaFile
,
1919 if La
not in RetVal
:
1921 for Lib
in La
.CodaTargetList
:
1922 self
._ApplyBuildRule
(Lib
.Target
, TAB_UNKNOWN_FILE
)
def GenModuleHash(self):
    """Compute (and memoize) the content hash identifying this module.

    The hash folds in, in order: the platform-level hash, the hashes of all
    dependent packages, the hashes of all used libraries (generated
    recursively if absent), the module meta file, and every source file.
    The hex digest is cached in GlobalData.gModuleHash[Arch][Name].

    Returns:
        bytes: the UTF-8-encoded hex digest for this module.

    NOTE(review): the source chunk is extraction-mangled with gaps; file-read
    lines, the hash-object initialization and the recursive library-hash call
    are reconstructed from the surviving fragments.
    """
    # Initialize a dictionary for each arch type
    if self.Arch not in GlobalData.gModuleHash:
        GlobalData.gModuleHash[self.Arch] = {}

    # Early exit if module or library has been hashed and is in memory
    if self.Name in GlobalData.gModuleHash[self.Arch]:
        return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')

    # Initialze hash object (md5, consistent with the rest of the cache code)
    m = hashlib.md5()

    # Add Platform level hash
    m.update(GlobalData.gPlatformHash.encode('utf-8'))

    # Add Package level hash
    if self.DependentPackageList:
        for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
            if Pkg.PackageName in GlobalData.gPackageHash:
                m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))

    # Add Library hash; generate a missing library hash on demand so the
    # recursion bottoms out at leaf libraries.
    if self.LibraryAutoGenList:
        for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
            if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
                Lib.GenModuleHash()
            m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))

    # Add Module self (the INF meta file content)
    with open(str(self.MetaFile), 'rb') as f:
        m.update(f.read())

    # Add Module's source files
    if self.SourceFileList:
        for File in sorted(self.SourceFileList, key=lambda x: str(x)):
            with open(str(File), 'rb') as f:
                m.update(f.read())

    GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()

    return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
def GenModuleFilesHash(self, gDict):
    """Hash this module's own files (meta file, sources, discovered headers).

    Stores ModuleFilesHashDigest / ModuleFilesHashHexDigest and the
    per-file (path, md5) chain into the shared cache IR keyed by
    (MetaFile.Path, Arch), then returns that IR entry.

    Args:
        gDict: shared (cross-process) dict of ModuleBuildCacheIR entries.

    NOTE(review): source chunk is extraction-mangled with gaps; `continue`
    statements, hash initialization and the guarded gDict update are
    reconstructed from the surviving fragments.
    """
    # Early exit if module or library has been hashed and is in memory
    if (self.MetaFile.Path, self.Arch) in gDict:
        if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:
            return gDict[(self.MetaFile.Path, self.Arch)]

    DependencyFileSet = set()
    # Add Module Meta file
    DependencyFileSet.add(self.MetaFile)

    # Add Module's source files
    if self.SourceFileList:
        for File in set(self.SourceFileList):
            DependencyFileSet.add(File)

    # Add modules's include header files
    # Search dependency file list for each source file
    SourceFileList = []
    OutPutFileList = []
    for Target in self.IntroTargetList:
        SourceFileList.extend(Target.Inputs)
        OutPutFileList.extend(Target.Outputs)
    # Generated outputs are not inputs to scan for header dependencies.
    for Item in OutPutFileList:
        if Item in SourceFileList:
            SourceFileList.remove(Item)

    SearchList = []
    for file_path in self.IncludePathList + self.BuildOptionIncPathList:
        # skip the folders in platform BuildDir which are not been generated yet
        if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):
            continue
        SearchList.append(file_path)
    FileDependencyDict = {}
    ForceIncludedFile = []
    for F in SourceFileList:
        # skip the files which are not been generated yet, because
        # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c
        if not os.path.exists(F.Path):
            continue
        FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)

    if FileDependencyDict:
        for Dependency in FileDependencyDict.values():
            DependencyFileSet.update(set(Dependency))

    # Caculate all above dependency files hash
    # Initialze hash object
    FileList = []
    m = hashlib.md5()
    for File in sorted(DependencyFileSet, key=lambda x: str(x)):
        if not os.path.exists(str(File)):
            EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
            continue
        with open(str(File), 'rb') as f:
            Content = f.read()
        m.update(Content)
        FileList.append((str(File), hashlib.md5(Content).hexdigest()))

    MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
    MewIR.ModuleFilesHashDigest = m.digest()
    MewIR.ModuleFilesHashHexDigest = m.hexdigest()
    MewIR.ModuleFilesChain = FileList
    with GlobalData.file_lock:
        try:
            # Update the existing IR in place when present...
            IR = gDict[(self.MetaFile.Path, self.Arch)]
            IR.ModuleFilesHashDigest = m.digest()
            IR.ModuleFilesHashHexDigest = m.hexdigest()
            IR.ModuleFilesChain = FileList
            gDict[(self.MetaFile.Path, self.Arch)] = IR
        except KeyError:
            # ...otherwise install the freshly built IR.
            gDict[(self.MetaFile.Path, self.Arch)] = MewIR

    return gDict[(self.MetaFile.Path, self.Arch)]
def GenPreMakefileHash(self, gDict):
    """Compute the pre-makefile hash for this module and store it in gDict.

    Folds in the platform hash, dependent package hashes, dependent library
    module-files hashes (generated on demand) and this module's own
    module-files hash; the hex digest goes into PreMakefileHashHexDigest.

    Args:
        gDict: shared dict of ModuleBuildCacheIR entries.

    NOTE(review): reconstructed from an extraction-mangled chunk; the early
    `return`s, `else:` branches and hash initialization fell into gaps.
    """
    # Early exit if module or library has been hashed and is in memory
    if (self.MetaFile.Path, self.Arch) in gDict and \
      gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
        return gDict[(self.MetaFile.Path, self.Arch)]

    # skip binary module
    if self.IsBinaryModule:
        return

    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
        self.GenModuleFilesHash(gDict)

    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
        EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" % (self.MetaFile.Path, self.Arch))
        return

    # Initialze hash object
    m = hashlib.md5()

    # Add Platform level hash
    if ('PlatformHash') in gDict:
        m.update(gDict[('PlatformHash')].encode('utf-8'))
    else:
        EdkLogger.quiet("[cache warning]: PlatformHash is missing")

    # Add Package level hash
    if self.DependentPackageList:
        for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
            if (Pkg.PackageName, 'PackageHash') in gDict:
                m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))
            else:
                EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" % (Pkg.PackageName, self.MetaFile.Name, self.Arch))

    # Add Library hash; generate a missing one on demand.
    if self.LibraryAutoGenList:
        for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
            if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
               not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:
                Lib.GenPreMakefileHash(gDict)
            m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)

    # Add Module self
    m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)

    with GlobalData.file_lock:
        IR = gDict[(self.MetaFile.Path, self.Arch)]
        IR.PreMakefileHashHexDigest = m.hexdigest()
        gDict[(self.MetaFile.Path, self.Arch)] = IR

    return gDict[(self.MetaFile.Path, self.Arch)]
def GenMakeHeaderFilesHash(self, gDict):
    """Hash the makefile, dependency headers and AutoGen files of this module.

    Ensures the AutoGen code and makefile have been created first, then
    hashes the makefile path entry, every dependency header and every
    AutoGen file, storing digest and (path, md5) chain into the gDict IR.

    Args:
        gDict: shared dict of ModuleBuildCacheIR entries.

    NOTE(review): reconstructed from an extraction-mangled chunk with gaps;
    in particular the guard structure around the libConstPcd/Refes lookups
    is inferred from fragment indentation — TODO confirm upstream.
    """
    # Early exit if module or library has been hashed and is in memory
    if (self.MetaFile.Path, self.Arch) in gDict and \
      gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
        return gDict[(self.MetaFile.Path, self.Arch)]

    # skip binary module
    if self.IsBinaryModule:
        return

    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:
        # Restore per-module PCD/reference bookkeeping before generating code.
        if (self.MetaFile.File, self.MetaFile.Root, self.Arch, self.MetaFile.Path) in GlobalData.libConstPcd:
            self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File, self.MetaFile.Root, self.Arch, self.MetaFile.Path)]
        if (self.MetaFile.File, self.MetaFile.Root, self.Arch, self.MetaFile.Path) in GlobalData.Refes:
            self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File, self.MetaFile.Root, self.Arch, self.MetaFile.Path)]
        self.CreateCodeFile()
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
        self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.File, self.Arch), []))

    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \
       not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
        EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" % (self.MetaFile.Path, self.Arch))
        return

    DependencyFileSet = set()
    # Add the module's makefile
    if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:
        DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)
    else:
        EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" % (self.MetaFile.Path, self.Arch))

    # Add the dependency header files collected during makefile generation
    if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:
        for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:
            DependencyFileSet.add(File)
    else:
        EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" % (self.MetaFile.Path, self.Arch))

    # Add the generated AutoGen files
    if self.AutoGenFileList:
        for File in set(self.AutoGenFileList):
            DependencyFileSet.add(File)

    # Caculate all above dependency files hash
    # Initialze hash object
    FileList = []
    m = hashlib.md5()
    for File in sorted(DependencyFileSet, key=lambda x: str(x)):
        if not os.path.exists(str(File)):
            EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
            continue
        with open(str(File), 'rb') as f:
            Content = f.read()
        m.update(Content)
        FileList.append((str(File), hashlib.md5(Content).hexdigest()))

    with GlobalData.file_lock:
        IR = gDict[(self.MetaFile.Path, self.Arch)]
        IR.AutoGenFileList = self.AutoGenFileList.keys()
        IR.MakeHeaderFilesHashChain = FileList
        IR.MakeHeaderFilesHashDigest = m.digest()
        gDict[(self.MetaFile.Path, self.Arch)] = IR

    return gDict[(self.MetaFile.Path, self.Arch)]
def GenMakeHash(self, gDict):
    """Compute the full make-phase hash and hash chain for this module.

    Combines the make-header-files digest, every dependent library's make
    hash (generated on demand) and this module's own module-files digest.
    The ordered, de-duplicated (path, md5) chain is accumulated in
    MakeHashChain; digest, hex digest and chain are stored into the gDict IR.

    Args:
        gDict: shared dict of ModuleBuildCacheIR entries.

    NOTE(review): reconstructed from an extraction-mangled chunk; the hash
    and chain initialization and the `continue` after the missing-digest
    print fell into gaps.
    """
    # Early exit if module or library has been hashed and is in memory
    if (self.MetaFile.Path, self.Arch) in gDict and \
      gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
        return gDict[(self.MetaFile.Path, self.Arch)]

    # skip binary module
    if self.IsBinaryModule:
        return

    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
        self.GenModuleFilesHash(gDict)
    if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
        self.GenMakeHeaderFilesHash(gDict)

    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \
       not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \
       not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \
       not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:
        EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" % (self.MetaFile.Path, self.Arch))
        return

    # Initialze hash object
    m = hashlib.md5()
    MakeHashChain = []

    # Add hash of makefile and dependency header files
    m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)
    New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))
    New.sort(key=lambda x: str(x))
    MakeHashChain += New

    # Add libraries' make hashes, generating them on demand.
    if self.LibraryAutoGenList:
        for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
            if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
               not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:
                Lib.GenMakeHash(gDict)
            if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:
                print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)
                continue
            m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)
            New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))
            New.sort(key=lambda x: str(x))
            MakeHashChain += New

    # Add this module's own files digest and chain.
    m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)
    New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))
    New.sort(key=lambda x: str(x))
    MakeHashChain += New

    with GlobalData.file_lock:
        IR = gDict[(self.MetaFile.Path, self.Arch)]
        IR.MakeHashDigest = m.digest()
        IR.MakeHashHexDigest = m.hexdigest()
        IR.MakeHashChain = MakeHashChain
        gDict[(self.MetaFile.Path, self.Arch)] = IR

    return gDict[(self.MetaFile.Path, self.Arch)]
## Decide whether we can skip the left autogen and make process
def CanSkipbyPreMakefileCache(self, gDict):
    """Return True when a pre-makefile cache hit lets the whole build be restored.

    Loads the stored (PreMakefileHash, MakeHash) pairs, compares against the
    freshly generated pre-makefile hash, and on a match copies the cached
    output and FFS trees into place, marking PreMakeCacheHit in the gDict IR.

    Args:
        gDict: shared dict of ModuleBuildCacheIR entries.

    NOTE(review): reconstructed from an extraction-mangled chunk; early
    `return False` lines and the try/except around the JSON load fell into
    gaps.  Loop variables renamed so they no longer shadow builtins.
    """
    if not GlobalData.gBinCacheSource:
        return False

    # If Module is binary, do not skip by cache
    if self.IsBinaryModule:
        return False

    # .inc is contains binary information so do not skip by hash as well
    for f_ext in self.SourceFileList:
        if '.inc' in str(f_ext):
            return False

    # Get the module hash values from stored cache and currrent build
    # then check whether cache hit based on the hash values
    # if cache hit, restore all the files from cache
    FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)

    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
    if not os.path.exists(ModuleHashPair):
        EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
        return False

    try:
        with open(ModuleHashPair, 'r') as f:
            ModuleHashPairList = json.load(f)
    except Exception:
        EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
        return False

    self.GenPreMakefileHash(gDict)
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
        EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" % (self.MetaFile.Path, self.Arch))
        return False

    MakeHashStr = None
    CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
    for idx, (PreMakefileHash, MakeHash) in enumerate(ModuleHashPairList):
        if PreMakefileHash == CurrentPreMakeHash:
            MakeHashStr = str(MakeHash)
    if not MakeHashStr:
        return False

    TargetHashDir = path.join(FileDir, MakeHashStr)
    TargetFfsHashDir = path.join(FfsDir, MakeHashStr)

    if not os.path.exists(TargetHashDir):
        EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
        return False

    # Restore cached build outputs (and FFS outputs when present).
    for root, _dirs, files in os.walk(TargetHashDir):
        for name in files:
            File = path.join(root, name)
            self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
    if os.path.exists(TargetFfsHashDir):
        for root, _dirs, files in os.walk(TargetFfsHashDir):
            for name in files:
                File = path.join(root, name)
                self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)

    if self.Name == "PcdPeim" or self.Name == "PcdDxe":
        CreatePcdDatabaseCode(self, TemplateString(), TemplateString())

    with GlobalData.file_lock:
        IR = gDict[(self.MetaFile.Path, self.Arch)]
        IR.PreMakeCacheHit = True
        gDict[(self.MetaFile.Path, self.Arch)] = IR
    print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)
    #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))
    return True
## Decide whether we can skip the make process
def CanSkipbyMakeCache(self, gDict):
    """Return True when a make-phase cache hit lets the make step be skipped.

    Mirrors CanSkipbyPreMakefileCache but compares MakeHashHexDigest and
    records MakeCacheHit (with explicit cache-miss diagnostics printed).

    Args:
        gDict: shared dict of ModuleBuildCacheIR entries.

    NOTE(review): reconstructed from an extraction-mangled chunk; early
    `return False` lines and the try/except around the JSON load fell into
    gaps.  Loop variables renamed so they no longer shadow builtins.
    """
    if not GlobalData.gBinCacheSource:
        return False

    # If Module is binary, do not skip by cache
    if self.IsBinaryModule:
        print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)
        return False

    # .inc is contains binary information so do not skip by hash as well
    for f_ext in self.SourceFileList:
        if '.inc' in str(f_ext):
            with GlobalData.file_lock:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.MakeCacheHit = False
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)
            return False

    # Get the module hash values from stored cache and currrent build
    # then check whether cache hit based on the hash values
    # if cache hit, restore all the files from cache
    FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)

    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
    if not os.path.exists(ModuleHashPair):
        EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
        return False

    try:
        with open(ModuleHashPair, 'r') as f:
            ModuleHashPairList = json.load(f)
    except Exception:
        EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
        return False

    self.GenMakeHash(gDict)
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
        EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" % (self.MetaFile.Path, self.Arch))
        return False

    MakeHashStr = None
    CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
    for idx, (PreMakefileHash, MakeHash) in enumerate(ModuleHashPairList):
        if MakeHash == CurrentMakeHash:
            MakeHashStr = str(MakeHash)
    if not MakeHashStr:
        print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
        return False

    TargetHashDir = path.join(FileDir, MakeHashStr)
    TargetFfsHashDir = path.join(FfsDir, MakeHashStr)
    if not os.path.exists(TargetHashDir):
        EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
        return False

    # Restore cached build outputs (and FFS outputs when present).
    for root, _dirs, files in os.walk(TargetHashDir):
        for name in files:
            File = path.join(root, name)
            self.CacheCopyFile(self.OutputDir, TargetHashDir, File)

    if os.path.exists(TargetFfsHashDir):
        for root, _dirs, files in os.walk(TargetFfsHashDir):
            for name in files:
                File = path.join(root, name)
                self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)

    if self.Name == "PcdPeim" or self.Name == "PcdDxe":
        CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
    with GlobalData.file_lock:
        IR = gDict[(self.MetaFile.Path, self.Arch)]
        IR.MakeCacheHit = True
        gDict[(self.MetaFile.Path, self.Arch)] = IR
    print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
    return True
## Show the first file name which causes cache miss
def PrintFirstMakeCacheMissFile(self, gDict):
    """Diagnose a make-cache miss by reporting the first differing chain entry.

    Locates a cached MakeHashChain file for this module, then walks the
    current chain in order and logs the first file whose path or md5 differs
    from the cached one.

    Args:
        gDict: shared dict of ModuleBuildCacheIR entries.

    NOTE(review): reconstructed from an extraction-mangled chunk; early
    `return`s, `break`s and the MakeHashSet initialization fell into gaps.
    Loop variables renamed so they no longer shadow builtins `file`/`hash`.
    """
    if not GlobalData.gBinCacheSource:
        return

    # skip binary module
    if self.IsBinaryModule:
        return

    if not (self.MetaFile.Path, self.Arch) in gDict:
        return

    # Only print cache miss file for the MakeCache not hit module
    if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
        return

    if not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
        EdkLogger.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self.MetaFile.Path, self.Arch))
        return

    # Find the cache dir name through the .ModuleHashPair file info
    FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)

    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
    if not os.path.exists(ModuleHashPair):
        EdkLogger.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
        return

    try:
        with open(ModuleHashPair, 'r') as f:
            ModuleHashPairList = json.load(f)
    except Exception:
        EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
        return

    # Collect every cached hash dir that actually exists on disk.
    MakeHashSet = set()
    for idx, (PreMakefileHash, MakeHash) in enumerate(ModuleHashPairList):
        TargetHashDir = path.join(FileDir, str(MakeHash))
        if os.path.exists(TargetHashDir):
            MakeHashSet.add(MakeHash)
    if not MakeHashSet:
        EdkLogger.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
        return

    TargetHash = list(MakeHashSet)[0]
    TargetHashDir = path.join(FileDir, str(TargetHash))
    if len(MakeHashSet) > 1:
        EdkLogger.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash, self.MetaFile.Path, self.Arch))

    ListFile = path.join(TargetHashDir, self.Name + '.MakeHashChain')
    if os.path.exists(ListFile):
        try:
            with open(ListFile, 'r') as f:
                CachedList = json.load(f)
        except Exception:
            EdkLogger.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile)
            return
    else:
        EdkLogger.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile)
        return

    CurrentList = gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain
    for idx, (CurrentFile, CurrentHash) in enumerate(CurrentList):
        (filecached, hashcached) = CachedList[idx]
        if CurrentFile != filecached:
            EdkLogger.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self.MetaFile.Path, self.Arch, CurrentFile, filecached))
            break
        if CurrentHash != hashcached:
            EdkLogger.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self.MetaFile.Path, self.Arch, CurrentFile))
            break
## Decide whether we can skip the ModuleAutoGen process
def CanSkipbyCache(self, gDict):
    """Return True when this module can be skipped due to any build-cache hit.

    The decision is memoized per module in GlobalData.gBuildHashSkipTracking.
    Binary modules and modules with '.inc' sources are never skipped, since
    their content is not covered by the hash scheme.

    Args:
        gDict: shared dict of ModuleBuildCacheIR entries.

    NOTE(review): reconstructed from an extraction-mangled chunk; the
    `return` statements fell into gaps and are restored here.
    """
    # Hashing feature is off
    if not GlobalData.gBinCacheSource:
        return False

    if self in GlobalData.gBuildHashSkipTracking:
        return GlobalData.gBuildHashSkipTracking[self]

    # If library or Module is binary do not skip by hash
    if self.IsBinaryModule:
        GlobalData.gBuildHashSkipTracking[self] = False
        return False

    # .inc is contains binary information so do not skip by hash as well
    for f_ext in self.SourceFileList:
        if '.inc' in str(f_ext):
            GlobalData.gBuildHashSkipTracking[self] = False
            return False

    if not (self.MetaFile.Path, self.Arch) in gDict:
        return False

    if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:
        GlobalData.gBuildHashSkipTracking[self] = True
        return True

    if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
        GlobalData.gBuildHashSkipTracking[self] = True
        return True

    return False
## Decide whether we can skip the ModuleAutoGen process
#  If any source file is newer than the module than we cannot skip
#
# NOTE(review): the `def` header itself fell into a gap of this
# extraction-mangled chunk and is reconstructed from the visible body
# fragments — TODO confirm the exact signature upstream.
def CanSkip(self):
    """Return True when AutoGen can be skipped because nothing changed.

    Skipping is based on file timestamps recorded in the module's
    AutoGenTimeStamp file; it is disabled entirely while any hash/cache
    feature is active (those paths make their own decision).  A positive
    result is memoized in GlobalData.gSikpAutoGenCache by MakeFileDir.
    """
    # Don't skip if cache feature enabled
    if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
        return False
    if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
        return True
    if not os.path.exists(self.TimeStampPath):
        return False
    #last creation time of the module
    DstTimeStamp = os.stat(self.TimeStampPath)[8]

    SrcTimeStamp = self.Workspace._SrcTimeStamp
    if SrcTimeStamp > DstTimeStamp:
        return False

    with open(self.TimeStampPath, 'r') as f:
        for source in f:
            source = source.rstrip('\n')
            # Any recorded source that vanished or is newer forces a rebuild.
            if not os.path.exists(source):
                return False
            if source not in ModuleAutoGen.TimeDict:
                ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
            if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
                return False
    GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
    return True
def TimeStampPath(self):
    """Return the path of this module's 'AutoGenTimeStamp' marker file.

    The file lives in the module's Makefile directory and records the
    source files (and implicitly the time) of the last AutoGen run.
    """
    return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')