2 # Create makefile for MS nmake and GNU make
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
7 from __future__
import absolute_import
8 from AutoGen
.AutoGen
import AutoGen
9 from Common
.LongFilePathSupport
import CopyLongFilePath
10 from Common
.BuildToolError
import *
11 from Common
.DataType
import *
12 from Common
.Misc
import *
13 from Common
.StringUtils
import NormPath
,GetSplitList
14 from collections
import defaultdict
15 from Workspace
.WorkspaceCommon
import OrderedListDict
16 import os
.path
as path
19 from . import InfSectionParser
22 from . import GenDepex
23 from io
import BytesIO
24 from GenPatchPcdTable
.GenPatchPcdTable
import parsePcdInfoFromMapFile
25 from Workspace
.MetaFileCommentParser
import UsageList
26 from .GenPcdDb
import CreatePcdDatabaseCode
27 from Common
.caching
import cached_class_function
28 from AutoGen
.ModuleAutoGenHelper
import PlatformInfo
,WorkSpaceInfo
29 from AutoGen
.CacheIR
import ModuleBuildCacheIR
## Mapping Makefile type: compiler family constant -> make flavor used for it.
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}

# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
# is the former use /I , the Latter used -I to specify include directories
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

## default file name for AutoGen
# The %(module_name)s placeholders are filled with the module's base name
# before the files are created.
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
# INF specification version written into generated As-Built INF files.
gInfSpecVersion = "0x00010017"
# Match name = variable
# NOTE: raw string literals so "\s"/"\w" reach the regex engine verbatim
# instead of relying on Python preserving unknown string escapes (which emits
# a SyntaxWarning/DeprecationWarning on modern Python versions).
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")

# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
# Template string to generic AsBuilt INF
# NOTE(review): this extraction is missing several lines of the template body
# (e.g. the [Defines] header and per-section item lines) and the closing
# delimiter of the triple-quoted string; the visible content is kept verbatim.
gAsBuiltInfHeaderString = TemplateString("""${header_comments}
INF_VERSION = ${module_inf_version}
BASE_NAME = ${module_name}
FILE_GUID = ${module_guid}
MODULE_TYPE = ${module_module_type}${BEGIN}
VERSION_STRING = ${module_version_string}${END}${BEGIN}
PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
DESTRUCTOR = ${module_destructor}${END}${BEGIN}
SHADOW = ${module_shadow}${END}${BEGIN}
PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
SPEC = ${module_spec}${END}${BEGIN}
UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
MODULE_UNI_FILE = ${module_uni_file}${END}
[Packages.${module_arch}]${BEGIN}
[Binaries.${module_arch}]${BEGIN}
[PatchPcd.${module_arch}]${BEGIN}
[Protocols.${module_arch}]${BEGIN}
[Ppis.${module_arch}]${BEGIN}
[Guids.${module_arch}]${BEGIN}
[PcdEx.${module_arch}]${BEGIN}
[LibraryClasses.${module_arch}]
## @LIB_INSTANCES${BEGIN}
# ${libraryclasses_item}${END}
${userextension_tianocore_item}
[BuildOptions.${module_arch}]
## ${flags_item}${END}
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    """Extend each list in CopyToDict with the list stored under the same key
    in CopyFromDict.

    CopyToDict is expected to behave like defaultdict(list); if a key from
    CopyFromDict is absent and CopyToDict is a plain dict, this raises KeyError.
    """
    for Key, Values in CopyFromDict.items():
        CopyToDict[Key].extend(Values)
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    """Join *PathList* into a single path, create that directory, and return
    the joined path.

    The visible body created the directory but never returned the path even
    though the header comment (and the callers, which use the result as a
    directory string) require it - restore the return.
    """
    RetVal = path.join(*PathList)
    CreateDirectory(RetVal)
    return RetVal
# Convert string to C format array
def _ConvertStringToByteArray(Value):
    # Normalize a value (C array literal "{...}", ASCII "..." or unicode
    # L"...") into a C byte-array initializer string.
    # NOTE(review): this extraction is missing several of the original lines
    # (guard clauses, branch bodies, accumulator initialization and the return
    # statements); the statements below are kept exactly as extracted.
    Value = Value.strip()
    if not Value.endswith('}'):
    # Array literal branch: strip whitespace and braces, then normalize each
    # field to its decimal representation (int(..., 0) accepts 0x.. etc.).
    Value = Value.replace(' ', '').replace('{', '').replace('}', '')
    ValFields = Value.split(',')
    for Index in range(len(ValFields)):
    ValFields[Index] = str(int(ValFields[Index], 0))
    Value = '{' + ','.join(ValFields) + '}'
    # String branch: L"..." is a wide (16-bit per char) string, "..." narrow.
    if Value.startswith('L"'):
    if not Value.endswith('"'):
    elif not Value.startswith('"') or not Value.endswith('"'):
    Value = eval(Value) # translate escape character
    for Index in range(0, len(Value)):
    # Wide characters are truncated to 16 bits, narrow ones to 8 bits.
    NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
    NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
    # Terminating NUL for the generated C array.
    Value = NewValue + '0}'
185 ## ModuleAutoGen class
187 # This class encapsules the AutoGen behaviors for the build tools. In addition to
188 # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
189 # to the [depex] section in module's inf file.
191 class ModuleAutoGen(AutoGen
):
192 # call super().__init__ then call the worker function with different parameter count
193 def __init__(self
, Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
, **kwargs
):
194 if not hasattr(self
, "_Init"):
195 self
._InitWorker
(Workspace
, MetaFile
, Target
, Toolchain
, Arch
, *args
)
    ## Cache the timestamps of metafiles of every module in a class attribute
    #
    # Filter construction: only create an instance when the module is actually
    # employed by the active platform.
    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # check if this module is employed by active platform
        # NOTE(review): the tail of this branch (format arguments of the
        # verbose message and the early "return None") is missing from this
        # extraction; the truncated statement is kept as extracted.
        if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch, args[-1]).ValidModule(MetaFile):
            EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
        return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
    ## Initialize ModuleAutoGen
    #
    #  @param      Workspace           EdkIIWorkspaceBuild object
    #  @param      ModuleFile          The path of module file
    #  @param      Target              Build target (DEBUG, RELEASE)
    #  @param      Toolchain           Name of tool chain
    #  @param      Arch                The arch the module supports
    #  @param      PlatformFile        Platform meta-file
    #
    # NOTE(review): several assignments from the original body (e.g. the
    # self.Arch assignment and a few cached-state fields) are missing from
    # this extraction; the visible statements are kept as extracted.
    def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile,DataPipe):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)
        self.Workspace = Workspace
        self.WorkspaceDir = ""
        self.PlatformInfo = None
        self.DataPipe = DataPipe
        # WorkspaceDir and PlatformInfo are populated from DataPipe here.
        self.__init_platform_info__()
        self.MetaFile = ModuleFile
        self.SourceDir = self.MetaFile.SubDir
        # Keep the source dir workspace-relative.
        self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)
        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily
        # Generation-state flags, flipped once the corresponding files exist.
        self.IsCodeFileCreated = False
        self.IsAsBuiltInfCreated = False
        self.DepexGenerated = False
        self.BuildDatabase = self.Workspace.BuildDatabase
        self.BuildRuleOrder = None
        # Comment maps collected from this module and its libraries.
        self._GuidComments = OrderedListDict()
        self._ProtocolComments = OrderedListDict()
        self._PpiComments = OrderedListDict()
        # Build-target bookkeeping, lazily populated by _ApplyBuildRule.
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None
        self.AutoGenDepSet = set()
        self.ReferenceModules = []
        self.FileDependCache = {}
260 def __init_platform_info__(self
):
261 pinfo
= self
.DataPipe
.Get("P_Info")
262 self
.WorkspaceDir
= pinfo
.get("WorkspaceDir")
263 self
.PlatformInfo
= PlatformInfo(self
.Workspace
,pinfo
.get("ActivePlatform"),pinfo
.get("Target"),pinfo
.get("ToolChain"),pinfo
.get("Arch"),self
.DataPipe
)
    ## hash() operator of ModuleAutoGen
    #
    #  The module file path and arch string will be used to represent
    #  hash value of this object
    #
    #  @retval   int Hash value of the module file path and arch
    #
    # NOTE(review): the "def __hash__(self):" line is absent from this
    # extraction; only the decorator and the return statement are visible.
    @cached_class_function
        return hash((self.MetaFile, self.Arch))

    # NOTE(review): the "def __repr__(self):" line is absent from this extraction.
        return "%s [%s]" % (self.MetaFile, self.Arch)

    # Get FixedAtBuild Pcds of this Module
    # NOTE(review): decorator, accumulator initialization, loop bodies and the
    # return statement are missing from this extraction.
    def FixedAtBuildPcds(self):
        for Pcd in self.ModulePcdList:
            # Only FixedAtBuild PCDs are collected.
            if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:
            if Pcd not in RetVal:

    # Map "TokenSpaceGuid.TokenName" -> default value, for VOID* FixedAtBuild
    # PCDs only (used for depex value substitution).
    def FixedVoidTypePcds(self):
        for Pcd in self.FixedAtBuildPcds:
            if Pcd.DatumType == TAB_VOID:
                if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:
                    RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue
298 def UniqueBaseName(self
):
299 ModuleNames
= self
.DataPipe
.Get("M_Name")
302 return ModuleNames
.get((self
.Name
,self
.MetaFile
),self
.Name
)
304 # Macros could be used in build_rule.txt (also Makefile)
308 ("WORKSPACE" ,self
.WorkspaceDir
),
309 ("MODULE_NAME" ,self
.Name
),
310 ("MODULE_NAME_GUID" ,self
.UniqueBaseName
),
311 ("MODULE_GUID" ,self
.Guid
),
312 ("MODULE_VERSION" ,self
.Version
),
313 ("MODULE_TYPE" ,self
.ModuleType
),
314 ("MODULE_FILE" ,str(self
.MetaFile
)),
315 ("MODULE_FILE_BASE_NAME" ,self
.MetaFile
.BaseName
),
316 ("MODULE_RELATIVE_DIR" ,self
.SourceDir
),
317 ("MODULE_DIR" ,self
.SourceDir
),
318 ("BASE_NAME" ,self
.Name
),
320 ("TOOLCHAIN" ,self
.ToolChain
),
321 ("TOOLCHAIN_TAG" ,self
.ToolChain
),
322 ("TOOL_CHAIN_TAG" ,self
.ToolChain
),
323 ("TARGET" ,self
.BuildTarget
),
324 ("BUILD_DIR" ,self
.PlatformInfo
.BuildDir
),
325 ("BIN_DIR" ,os
.path
.join(self
.PlatformInfo
.BuildDir
, self
.Arch
)),
326 ("LIB_DIR" ,os
.path
.join(self
.PlatformInfo
.BuildDir
, self
.Arch
)),
327 ("MODULE_BUILD_DIR" ,self
.BuildDir
),
328 ("OUTPUT_DIR" ,self
.OutputDir
),
329 ("DEBUG_DIR" ,self
.DebugDir
),
330 ("DEST_DIR_OUTPUT" ,self
.OutputDir
),
331 ("DEST_DIR_DEBUG" ,self
.DebugDir
),
332 ("PLATFORM_NAME" ,self
.PlatformInfo
.Name
),
333 ("PLATFORM_GUID" ,self
.PlatformInfo
.Guid
),
334 ("PLATFORM_VERSION" ,self
.PlatformInfo
.Version
),
335 ("PLATFORM_RELATIVE_DIR" ,self
.PlatformInfo
.SourceDir
),
336 ("PLATFORM_DIR" ,mws
.join(self
.WorkspaceDir
, self
.PlatformInfo
.SourceDir
)),
337 ("PLATFORM_OUTPUT_DIR" ,self
.PlatformInfo
.OutputDir
),
338 ("FFS_OUTPUT_DIR" ,self
.FfsOutputDir
)
    ## Return the module build data object
    # NOTE(review): throughout this region most "def"/decorator lines are
    # absent from this extraction; the visible statements are kept as extracted.
        return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]

    ## Return the module name
        return self.Module.BaseName

    ## Return the module DxsFile if exist
        return self.Module.DxsFile

    ## Return the module meta-file GUID
    #
    #  To build same module more than once, the module path with FILE_GUID overridden has
    #  the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
    #  in DSC. The overridden GUID can be retrieved from file name
        if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
            # Length of GUID is 36
            return os.path.basename(self.MetaFile.Path)[:36]
        return self.Module.Guid

    ## Return the module version
        return self.Module.Version

    ## Return the module type
    def ModuleType(self):
        return self.Module.ModuleType

    ## Return the component type (for Edk.x style of module)
    def ComponentType(self):
        return self.Module.ComponentType

    ## Return the build type
        return self.Module.BuildType

    ## Return the PCD_IS_DRIVER setting
    def PcdIsDriver(self):
        return self.Module.PcdIsDriver

    ## Return the autogen version, i.e. module meta-file version
    def AutoGenVersion(self):
        return self.Module.AutoGenVersion

    ## Check if the module is library or not
        return bool(self.Module.LibraryClass)

    ## Check if the module is binary module or not
    def IsBinaryModule(self):
        return self.Module.IsBinaryModule

    ## Return the directory to store intermediate files of the module
    # NOTE(review): the enclosing _MakeDir((...)) call is missing here.
        self.PlatformInfo.BuildDir,
        self.MetaFile.BaseName

    ## Return the directory to store the intermediate object files of the module
        return _MakeDir((self.BuildDir, "OUTPUT"))

    ## Return the directory path to store ffs file
    def FfsOutputDir(self):
        # NOTE(review): the fallback branch (no FDF parser) is missing here.
        if GlobalData.gFdfParser:
            return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)

    ## Return the directory to store auto-gened source files of the module
        return _MakeDir((self.BuildDir, "DEBUG"))
    ## Return the path of custom file
    # NOTE(review): the accumulator initialization and return statement are
    # missing from this extraction.
    def CustomMakefile(self):
        # Map make flavor ("nmake"/"gmake") to the custom makefile path
        # declared in the module INF; unknown types default to 'nmake'.
        for Type in self.Module.CustomMakefile:
            MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'
            File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
            RetVal[MakeType] = File

    ## Return the directory of the makefile
    #
    #  @retval   string  The directory string of module's makefile
    # NOTE(review): body missing from this extraction.
    def MakeFileDir(self):

    ## Return build command string
    #
    #  @retval   string  Build command string
    def BuildCommand(self):
        return self.PlatformInfo.BuildCommand

    ## Get object list of all packages the module and its dependent libraries belong to
    #
    #  @retval   list    The list of package object
    # NOTE(review): list initialization, "continue" and return are missing
    # from this extraction.
    def DerivedPackageList(self):
        for M in [self.Module] + self.DependentLibraryList:
            for Package in M.Packages:
                if Package in PackageList:
                PackageList.append(Package)
    ## Get the depex string
    #
    # @return : a string contain all depex expression.
    # NOTE(review): numerous lines (accumulator initialization, "else:"
    # branches, early returns) are missing from this extraction; visible
    # statements are kept as extracted.
    def _GetDepexExpresionString(self):
        ## DPX_SOURCE IN Define section.
        if self.Module.DxsFile:
        # Collect [Depex] sections from this module and every dependent library.
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            DepexExpressionList = InfObj.GetDepexExpresionList()
            for DepexExpression in DepexExpressionList:
                for key in DepexExpression:
                    Arch, ModuleType = key
                    # Drop comment tokens from the expression.
                    DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
                    # the type of build module is USER_DEFINED.
                    # All different DEPEX section tags would be copied into the As Built INF file
                    # and there would be separate DEPEX section tags
                    if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
                        if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
                        if Arch.upper() == TAB_ARCH_COMMON or \
                            (Arch.upper() == self.Arch.upper() and \
                            ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
        #the type of build module is USER_DEFINED.
        if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
            for Depex in DepexList:
                DepexStr += '[Depex.%s.%s]\n' % key
                DepexStr += '\n'.join('# '+ val for val in Depex[key])
            return '[Depex.%s]\n' % self.Arch
        #the type of build module not is USER_DEFINED.
        for Depex in DepexList:
            for D in Depex.values():
                DepexStr += ' '.join(val for val in D)
            # Trim a trailing END token if present at the very end.
            Index = DepexStr.find('END')
            if Index > -1 and Index == len(DepexStr) - 3:
                DepexStr = DepexStr[:-3]
            DepexStr = DepexStr.strip()
        DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
        return '[Depex.%s]\n' % self.Arch
        return '[Depex.%s]\n# ' % self.Arch + DepexStr
    ## Merge dependency expression
    #
    #  @retval   list    The token list of the dependency expression after parsed
    # NOTE(review): the "def"/decorator line of this cached property and many
    # statements (accumulator setup, "else:" branches, early returns) are
    # missing from this extraction; visible statements are kept as extracted.
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
        FixedVoidTypePcds = {}
        for M in [self] + self.LibraryAutoGenList:
            FixedVoidTypePcds.update(M.FixedVoidTypePcds)
        for M in [self] + self.LibraryAutoGenList:
            for D in M.Module.Depex[self.Arch, self.ModuleType]:
                DepexList.append('AND')
            DepexList.append('(')
            #replace D with value if D is FixedAtBuild PCD
                    Value = FixedVoidTypePcds[item]
                    # A GUID used in a depex must be a 16-byte VOID* value.
                    if len(Value.split(',')) != 16:
                        EdkLogger.error("build", FORMAT_INVALID,
                                        "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
                    NewList.append(Value)
                    EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))
            DepexList.extend(NewList)
            if DepexList[-1] == 'END': # no need of a END at this time
            DepexList.append(')')
            EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
        # BEFORE/AFTER expressions are not mergeable.
        if 'BEFORE' in DepexList or 'AFTER' in DepexList:
        if len(DepexList) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexList}
    ## Merge dependency expression
    #
    #  @retval   list    The token list of the dependency expression after parsed
    # NOTE(review): guard-branch bodies and early returns are missing from
    # this extraction; visible statements are kept as extracted.
    def DepexExpressionDict(self):
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
        DepexExpressionString = ''
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
        for M in [self.Module] + self.DependentLibraryList:
            for D in M.DepexExpression[self.Arch, self.ModuleType]:
                if DepexExpressionString != '':
                    DepexExpressionString += ' AND '
                DepexExpressionString += '('
                DepexExpressionString += D
                # Drop any trailing END token before closing the parenthesis.
                DepexExpressionString = DepexExpressionString.rstrip('END').strip()
                DepexExpressionString += ')'
            EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
        # BEFORE/AFTER expressions are not mergeable.
        if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
        if len(DepexExpressionString) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexExpressionString}
    # Get the tiano core user extension, it is contain dependent library.
    # @retval: a list contain tiano core userextension.
    # NOTE(review): per-section initialization (e.g. the TianoCoreList setup
    # and the Arch assignment from ItemList) is missing from this extraction.
    def _GetTianoCoreUserExtensionList(self):
        TianoCoreUserExtentionList = []
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
            for TianoCoreUserExtent in TianoCoreUserExtenList:
                for Section in TianoCoreUserExtent:
                    ItemList = Section.split(TAB_SPLIT)
                    if len(ItemList) == 4:
                        # Keep sections that apply to the common arch or this
                        # module's arch.
                        if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
                            TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
                            TianoCoreList.extend(TianoCoreUserExtent[Section][:])
                            TianoCoreList.append('\n')
                            TianoCoreUserExtentionList.append(TianoCoreList)
        return TianoCoreUserExtentionList
    ## Return the list of specification version required for the module
    #
    #  @retval   list    The list of specification defined in module file
    def Specification(self):
        return self.Module.Specification

    ## Tool option for the module build
    #
    #  @param    PlatformInfo    The object of PlatformBuildInfo
    #  @retval   dict            The dict containing valid options
    # NOTE(review): the "return RetVal" statement is missing from this
    # extraction; visible statements are kept as extracted.
    def BuildOption(self):
        RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
        if self.BuildRuleOrder:
            # Normalize the rule order into a list of dotted extensions.
            self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
    ## Get include path list from tool option for the module build
    #
    #  @retval   list    The include path list
    # NOTE(review): accumulator initialization, "else:" branches, the error
    # message arguments and the return statement are missing from this
    # extraction; visible statements are kept as extracted.
    def BuildOptionIncPathList(self):
        # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
        # is the former use /I , the Latter used -I to specify include directories
        # NOTE(review): "(TAB_COMPILER_MSFT)" is a plain string, not a tuple,
        # so this "in" is a substring test - likely intended {TAB_COMPILER_MSFT};
        # confirm against upstream before changing.
        if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):
            BuildOptIncludeRegEx = gBuildOptIncludePatternMsft
        elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
            BuildOptIncludeRegEx = gBuildOptIncludePatternOther
            # New ToolChainFamily, don't known whether there is option to specify include directories
        for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
            FlagOption = self.BuildOption[Tool]['FLAGS']
            if self.ToolChainFamily != 'RVCT':
                IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
                # RVCT may specify a list of directory seperated by commas
                for Path in BuildOptIncludeRegEx.findall(FlagOption):
                    PathList = GetSplitList(Path, TAB_COMMA_SPLIT)
                    IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)
            # EDK II modules must not reference header files outside of the packages they depend on or
            # within the module's directory tree. Report error if violation.
            if GlobalData.gDisableIncludePathCheck == False:
                for Path in IncPathList:
                    if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
                        ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
                        EdkLogger.error("build",
                                        File=str(self.MetaFile))
            RetVal += IncPathList
719 # What kind of files can be built is determined by build rules in
720 # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
723 def SourceFileList(self
):
725 ToolChainTagSet
= {"", TAB_STAR
, self
.ToolChain
}
726 ToolChainFamilySet
= {"", TAB_STAR
, self
.ToolChainFamily
, self
.BuildRuleFamily
}
727 for F
in self
.Module
.Sources
:
729 if F
.TagName
not in ToolChainTagSet
:
730 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "The toolchain [%s] for processing file [%s] is found, "
731 "but [%s] is currently used" % (F
.TagName
, str(F
), self
.ToolChain
))
733 # match tool chain family or build rule family
734 if F
.ToolChainFamily
not in ToolChainFamilySet
:
737 "The file [%s] must be built by tools of [%s], " \
738 "but current toolchain family is [%s], buildrule family is [%s]" \
739 % (str(F
), F
.ToolChainFamily
, self
.ToolChainFamily
, self
.BuildRuleFamily
))
742 # add the file path into search path list for file including
743 if F
.Dir
not in self
.IncludePathList
:
744 self
.IncludePathList
.insert(0, F
.Dir
)
747 self
._MatchBuildRuleOrder
(RetVal
)
750 self
._ApplyBuildRule
(F
, TAB_UNKNOWN_FILE
)
753 def _MatchBuildRuleOrder(self
, FileList
):
756 for SingleFile
in FileList
:
757 if self
.BuildRuleOrder
and SingleFile
.Ext
in self
.BuildRuleOrder
and SingleFile
.Ext
in self
.BuildRules
:
758 key
= SingleFile
.Path
.rsplit(SingleFile
.Ext
,1)[0]
759 if key
in Order_Dict
:
760 Order_Dict
[key
].append(SingleFile
.Ext
)
762 Order_Dict
[key
] = [SingleFile
.Ext
]
766 if len(Order_Dict
[F
]) > 1:
767 Order_Dict
[F
].sort(key
=lambda i
: self
.BuildRuleOrder
.index(i
))
768 for Ext
in Order_Dict
[F
][1:]:
769 RemoveList
.append(F
+ Ext
)
771 for item
in RemoveList
:
772 FileList
.remove(item
)
    ## Return the list of unicode files
    def UnicodeFileList(self):
        return self.FileTypes.get(TAB_UNICODE_FILE,[])

    ## Return the list of vfr files
    def VfrFileList(self):
        return self.FileTypes.get(TAB_VFR_FILE, [])

    ## Return the list of Image Definition files
    def IdfFileList(self):
        return self.FileTypes.get(TAB_IMAGE_FILE,[])

    ## Return a list of files which can be built from binary
    #
    #  "Build" binary files are just to copy them to build directory.
    #
    #  @retval   list            The list of files which can be built later
    # NOTE(review): accumulator setup, "continue" and the return statement are
    # missing from this extraction; visible statements are kept as extracted.
    def BinaryFileList(self):
        for F in self.Module.Binaries:
            # Skip binaries declared for a different build target.
            if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:
            self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
    # Build a mapping from file type (and source extension) to the applicable
    # build-rule object for this module's build/module type, arch and family.
    # NOTE(review): the accumulator setup, the guard around the second
    # (ToolChainFamily) lookup, the "continue" for missing rules and the
    # return statement are missing from this extraction.
    def BuildRules(self):
        BuildRuleDatabase = self.PlatformInfo.BuildRule
        for Type in BuildRuleDatabase.FileTypeList:
            #first try getting build rule by BuildRuleFamily
            RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
            # build type is always module type, but ...
            if self.ModuleType != self.BuildType:
                RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
            #second try getting build rule by ToolChainFamily
                RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
                # build type is always module type, but ...
                if self.ModuleType != self.BuildType:
                    RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
            # Bind this module's macros into the rule, then index it both by
            # file type and by each source extension it handles.
            RuleObject = RuleObject.Instantiate(self.Macros)
            RetVal[Type] = RuleObject
            for Ext in RuleObject.SourceFileExtList:
                RetVal[Ext] = RuleObject
, File
, FileType
, BinaryFileList
=None):
834 if self
._BuildTargets
is None:
835 self
._IntroBuildTargetList
= set()
836 self
._FinalBuildTargetList
= set()
837 self
._BuildTargets
= defaultdict(set)
838 self
._FileTypes
= defaultdict(set)
840 if not BinaryFileList
:
841 BinaryFileList
= self
.BinaryFileList
843 SubDirectory
= os
.path
.join(self
.OutputDir
, File
.SubDir
)
844 if not os
.path
.exists(SubDirectory
):
845 CreateDirectory(SubDirectory
)
851 # Make sure to get build rule order value
855 while Index
< len(SourceList
):
856 Source
= SourceList
[Index
]
860 CreateDirectory(Source
.Dir
)
862 if File
.IsBinary
and File
== Source
and File
in BinaryFileList
:
863 # Skip all files that are not binary libraries
864 if not self
.IsLibrary
:
866 RuleObject
= self
.BuildRules
[TAB_DEFAULT_BINARY_FILE
]
867 elif FileType
in self
.BuildRules
:
868 RuleObject
= self
.BuildRules
[FileType
]
869 elif Source
.Ext
in self
.BuildRules
:
870 RuleObject
= self
.BuildRules
[Source
.Ext
]
872 # stop at no more rules
874 self
._FinalBuildTargetList
.add(LastTarget
)
877 FileType
= RuleObject
.SourceFileType
878 self
._FileTypes
[FileType
].add(Source
)
880 # stop at STATIC_LIBRARY for library
881 if self
.IsLibrary
and FileType
== TAB_STATIC_LIBRARY
:
883 self
._FinalBuildTargetList
.add(LastTarget
)
886 Target
= RuleObject
.Apply(Source
, self
.BuildRuleOrder
)
889 self
._FinalBuildTargetList
.add(LastTarget
)
891 elif not Target
.Outputs
:
892 # Only do build for target with outputs
893 self
._FinalBuildTargetList
.add(Target
)
895 self
._BuildTargets
[FileType
].add(Target
)
897 if not Source
.IsBinary
and Source
== File
:
898 self
._IntroBuildTargetList
.add(Target
)
900 # to avoid cyclic rule
901 if FileType
in RuleChain
:
904 RuleChain
.add(FileType
)
905 SourceList
.extend(Target
.Outputs
)
907 FileType
= TAB_UNKNOWN_FILE
911 if self
._BuildTargets
is None:
912 self
._IntroBuildTargetList
= set()
913 self
._FinalBuildTargetList
= set()
914 self
._BuildTargets
= defaultdict(set)
915 self
._FileTypes
= defaultdict(set)
917 #TRICK: call SourceFileList property to apply build rule for source files
920 #TRICK: call _GetBinaryFileList to apply build rule for binary files
923 return self
._BuildTargets
926 def IntroTargetList(self
):
928 return self
._IntroBuildTargetList
931 def CodaTargetList(self
):
933 return self
._FinalBuildTargetList
938 return self
._FileTypes
940 ## Get the list of package object the module depends on
942 # @retval list The package object list
945 def DependentPackageList(self
):
946 return self
.Module
.Packages
948 ## Return the list of auto-generated code file
950 # @retval list The list of auto-generated file
953 def AutoGenFileList(self
):
954 AutoGenUniIdf
= self
.BuildType
!= 'UEFI_HII'
955 UniStringBinBuffer
= BytesIO()
956 IdfGenBinBuffer
= BytesIO()
958 AutoGenC
= TemplateString()
959 AutoGenH
= TemplateString()
960 StringH
= TemplateString()
961 StringIdf
= TemplateString()
962 GenC
.CreateCode(self
, AutoGenC
, AutoGenH
, StringH
, AutoGenUniIdf
, UniStringBinBuffer
, StringIdf
, AutoGenUniIdf
, IdfGenBinBuffer
)
964 # AutoGen.c is generated if there are library classes in inf, or there are object files
966 if str(AutoGenC
) != "" and (len(self
.Module
.LibraryClasses
) > 0
967 or TAB_OBJECT_FILE
in self
.FileTypes
):
968 AutoFile
= PathClass(gAutoGenCodeFileName
, self
.DebugDir
)
969 RetVal
[AutoFile
] = str(AutoGenC
)
970 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
971 if str(AutoGenH
) != "":
972 AutoFile
= PathClass(gAutoGenHeaderFileName
, self
.DebugDir
)
973 RetVal
[AutoFile
] = str(AutoGenH
)
974 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
975 if str(StringH
) != "":
976 AutoFile
= PathClass(gAutoGenStringFileName
% {"module_name":self
.Name
}, self
.DebugDir
)
977 RetVal
[AutoFile
] = str(StringH
)
978 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
979 if UniStringBinBuffer
is not None and UniStringBinBuffer
.getvalue() != b
"":
980 AutoFile
= PathClass(gAutoGenStringFormFileName
% {"module_name":self
.Name
}, self
.OutputDir
)
981 RetVal
[AutoFile
] = UniStringBinBuffer
.getvalue()
982 AutoFile
.IsBinary
= True
983 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
984 if UniStringBinBuffer
is not None:
985 UniStringBinBuffer
.close()
986 if str(StringIdf
) != "":
987 AutoFile
= PathClass(gAutoGenImageDefFileName
% {"module_name":self
.Name
}, self
.DebugDir
)
988 RetVal
[AutoFile
] = str(StringIdf
)
989 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
990 if IdfGenBinBuffer
is not None and IdfGenBinBuffer
.getvalue() != b
"":
991 AutoFile
= PathClass(gAutoGenIdfFileName
% {"module_name":self
.Name
}, self
.OutputDir
)
992 RetVal
[AutoFile
] = IdfGenBinBuffer
.getvalue()
993 AutoFile
.IsBinary
= True
994 self
._ApplyBuildRule
(AutoFile
, TAB_UNKNOWN_FILE
)
995 if IdfGenBinBuffer
is not None:
996 IdfGenBinBuffer
.close()
999 ## Return the list of library modules explicitly or implicitly used by this module
1001 def DependentLibraryList(self
):
1002 # only merge library classes and PCD for non-library module
1005 return self
.PlatformInfo
.ApplyLibraryInstance(self
.Module
)
1007 ## Get the list of PCDs from current module
1009 # @retval list The list of PCD
1012 def ModulePcdList(self
):
1013 # apply PCD settings from platform
1014 RetVal
= self
.PlatformInfo
.ApplyPcdSetting(self
.Module
, self
.Module
.Pcds
)
1018 def _PcdComments(self
):
1019 ReVal
= OrderedListDict()
1020 ExtendCopyDictionaryLists(ReVal
, self
.Module
.PcdComments
)
1021 if not self
.IsLibrary
:
1022 for Library
in self
.DependentLibraryList
:
1023 ExtendCopyDictionaryLists(ReVal
, Library
.PcdComments
)
1026 ## Get the list of PCDs from dependent libraries
1028 # @retval list The list of PCD
1031 def LibraryPcdList(self
):
1036 # get PCDs from dependent libraries
1037 for Library
in self
.DependentLibraryList
:
1038 PcdsInLibrary
= OrderedDict()
1039 for Key
in Library
.Pcds
:
1040 # skip duplicated PCDs
1041 if Key
in self
.Module
.Pcds
or Key
in Pcds
:
1044 PcdsInLibrary
[Key
] = copy
.copy(Library
.Pcds
[Key
])
1045 RetVal
.extend(self
.PlatformInfo
.ApplyPcdSetting(self
.Module
, PcdsInLibrary
, Library
=Library
))
1048 ## Get the GUID value mapping
1050 # @retval dict The mapping between GUID cname and its value
1054 RetVal
= self
.Module
.Guids
1055 for Library
in self
.DependentLibraryList
:
1056 RetVal
.update(Library
.Guids
)
1057 ExtendCopyDictionaryLists(self
._GuidComments
, Library
.GuidComments
)
1058 ExtendCopyDictionaryLists(self
._GuidComments
, self
.Module
.GuidComments
)
1062 def GetGuidsUsedByPcd(self
):
1063 RetVal
= OrderedDict(self
.Module
.GetGuidsUsedByPcd())
1064 for Library
in self
.DependentLibraryList
:
1065 RetVal
.update(Library
.GetGuidsUsedByPcd())
def ProtocolList(self):
    """Return the mapping between protocol cname and its value.

    Protocols of dependent libraries are merged in; as a side effect,
    protocol usage comments (module's own and each library's) are
    accumulated into self._ProtocolComments.
    """
    Protocols = OrderedDict(self.Module.Protocols)
    for Lib in self.DependentLibraryList:
        Protocols.update(Lib.Protocols)
        ExtendCopyDictionaryLists(self._ProtocolComments, Lib.ProtocolComments)
    ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)
    return Protocols
1080 ## Get the PPI value mapping
1082 # @retval dict The mapping between PPI cname and its value
1086 RetVal
= OrderedDict(self
.Module
.Ppis
)
1087 for Library
in self
.DependentLibraryList
:
1088 RetVal
.update(Library
.Ppis
)
1089 ExtendCopyDictionaryLists(self
._PpiComments
, Library
.PpiComments
)
1090 ExtendCopyDictionaryLists(self
._PpiComments
, self
.Module
.PpiComments
)
def IncludePathList(self):
    """Return the include search path list for this module.

    Order: module source directory, debug directory, then each dependent
    package's directory followed by the package's include paths.  A
    package's private includes are filtered out unless this module's INF
    lives inside that package.
    """
    Paths = [self.MetaFile.Dir, self.DebugDir]
    for Package in self.Module.Packages:
        PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
        if PackageDir not in Paths:
            Paths.append(PackageDir)
        IncludesList = Package.Includes
        if Package._PrivateIncludes:
            # Modules located outside the package must not see its private includes.
            if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
                IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
        for Inc in IncludesList:
            if Inc not in Paths:
                Paths.append(str(Inc))
    return Paths
def IncludePathLength(self):
    """Return the total character length of the include path list, counting
    one extra character (separator) per path entry."""
    return sum(len(IncPath) + 1 for IncPath in self.IncludePathList)
1120 ## Get the list of include paths from the packages
1122 # @IncludesList list The list path
1125 def PackageIncludePathList(self
):
1127 for Package
in self
.Module
.Packages
:
1128 PackageDir
= mws
.join(self
.WorkspaceDir
, Package
.MetaFile
.Dir
)
1129 IncludesList
= Package
.Includes
1130 if Package
._PrivateIncludes
:
1131 if not self
.MetaFile
.Path
.startswith(PackageDir
):
1132 IncludesList
= list(set(Package
.Includes
).difference(set(Package
._PrivateIncludes
)))
1135 ## Get HII EX PCDs which maybe used by VFR
1137 # efivarstore used by VFR may relate with HII EX PCDs
1138 # Get the variable name and GUID from efivarstore and HII EX PCD
1139 # List the HII EX PCDs in As Built INF if both name and GUID match.
1141 # @retval list HII EX PCDs
1143 def _GetPcdsMaybeUsedByVfr(self
):
1144 if not self
.SourceFileList
:
1148 for SrcFile
in self
.SourceFileList
:
1149 if SrcFile
.Ext
.lower() != '.vfr':
1151 Vfri
= os
.path
.join(self
.OutputDir
, SrcFile
.BaseName
+ '.i')
1152 if not os
.path
.exists(Vfri
):
1154 VfriFile
= open(Vfri
, 'r')
1155 Content
= VfriFile
.read()
1157 Pos
= Content
.find('efivarstore')
1160 # Make sure 'efivarstore' is the start of efivarstore statement
1161 # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
1164 while Index
>= 0 and Content
[Index
] in ' \t\r\n':
1166 if Index
>= 0 and Content
[Index
] != ';':
1167 Pos
= Content
.find('efivarstore', Pos
+ len('efivarstore'))
1170 # 'efivarstore' must be followed by name and guid
1172 Name
= gEfiVarStoreNamePattern
.search(Content
, Pos
)
1175 Guid
= gEfiVarStoreGuidPattern
.search(Content
, Pos
)
1178 NameArray
= _ConvertStringToByteArray('L"' + Name
.group(1) + '"')
1179 NameGuids
.add((NameArray
, GuidStructureStringToGuidString(Guid
.group(1))))
1180 Pos
= Content
.find('efivarstore', Name
.end())
1184 for Pcd
in self
.PlatformInfo
.Pcds
.values():
1185 if Pcd
.Type
!= TAB_PCDS_DYNAMIC_EX_HII
:
1187 for SkuInfo
in Pcd
.SkuInfoList
.values():
1188 Value
= GuidValue(SkuInfo
.VariableGuid
, self
.PlatformInfo
.PackageList
, self
.MetaFile
.Path
)
1191 Name
= _ConvertStringToByteArray(SkuInfo
.VariableName
)
1192 Guid
= GuidStructureStringToGuidString(Value
)
1193 if (Name
, Guid
) in NameGuids
and Pcd
not in HiiExPcds
:
1194 HiiExPcds
.append(Pcd
)
1199 def _GenOffsetBin(self
):
1201 for SourceFile
in self
.Module
.Sources
:
1202 if SourceFile
.Type
.upper() == ".VFR" :
1204 # search the .map file to find the offset of vfr binary in the PE32+/TE file.
1206 VfrUniBaseName
[SourceFile
.BaseName
] = (SourceFile
.BaseName
+ "Bin")
1207 elif SourceFile
.Type
.upper() == ".UNI" :
1209 # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
1211 VfrUniBaseName
["UniOffsetName"] = (self
.Name
+ "Strings")
1213 if not VfrUniBaseName
:
1215 MapFileName
= os
.path
.join(self
.OutputDir
, self
.Name
+ ".map")
1216 EfiFileName
= os
.path
.join(self
.OutputDir
, self
.Name
+ ".efi")
1217 VfrUniOffsetList
= GetVariableOffset(MapFileName
, EfiFileName
, list(VfrUniBaseName
.values()))
1218 if not VfrUniOffsetList
:
1221 OutputName
= '%sOffset.bin' % self
.Name
1222 UniVfrOffsetFileName
= os
.path
.join( self
.OutputDir
, OutputName
)
1225 fInputfile
= open(UniVfrOffsetFileName
, "wb+", 0)
1227 EdkLogger
.error("build", FILE_OPEN_FAILURE
, "File open failed for %s" % UniVfrOffsetFileName
, None)
1229 # Use a instance of BytesIO to cache data
1230 fStringIO
= BytesIO()
1232 for Item
in VfrUniOffsetList
:
1233 if (Item
[0].find("Strings") != -1):
1235 # UNI offset in image.
1237 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
1239 UniGuid
= b
'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
1240 fStringIO
.write(UniGuid
)
1241 UniValue
= pack ('Q', int (Item
[1], 16))
1242 fStringIO
.write (UniValue
)
1245 # VFR binary offset in image.
1247 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
1249 VfrGuid
= b
'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
1250 fStringIO
.write(VfrGuid
)
1251 VfrValue
= pack ('Q', int (Item
[1], 16))
1252 fStringIO
.write (VfrValue
)
1254 # write data into file.
1257 fInputfile
.write (fStringIO
.getvalue())
1259 EdkLogger
.error("build", FILE_WRITE_FAILURE
, "Write data to file %s failed, please check whether the "
1260 "file been locked or using by other applications." %UniVfrOffsetFileName
, None)
1266 def OutputFile(self
):
1268 OutputDir
= self
.OutputDir
.replace('\\', '/').strip('/')
1269 DebugDir
= self
.DebugDir
.replace('\\', '/').strip('/')
1270 for Item
in self
.CodaTargetList
:
1271 File
= Item
.Target
.Path
.replace('\\', '/').strip('/').replace(DebugDir
, '').replace(OutputDir
, '').strip('/')
1273 if self
.DepexGenerated
:
1274 retVal
.add(self
.Name
+ '.depex')
1276 Bin
= self
._GenOffsetBin
()
1280 for Root
, Dirs
, Files
in os
.walk(OutputDir
):
1282 if File
.lower().endswith('.pdb'):
1287 ## Create AsBuilt INF file the module
1289 def CreateAsBuiltInf(self
):
1291 if self
.IsAsBuiltInfCreated
:
1294 # Skip INF file generation for libraries
1298 # Skip the following code for modules with no source files
1299 if not self
.SourceFileList
:
1302 # Skip the following code for modules without any binary files
1303 if self
.BinaryFileList
:
1306 ### TODO: How to handles mixed source and binary modules
1308 # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
1309 # Also find all packages that the DynamicEx PCDs depend on
1314 PcdTokenSpaceList
= []
1315 for Pcd
in self
.ModulePcdList
+ self
.LibraryPcdList
:
1316 if Pcd
.Type
== TAB_PCDS_PATCHABLE_IN_MODULE
:
1317 PatchablePcds
.append(Pcd
)
1318 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_PATCHABLE_IN_MODULE
))
1319 elif Pcd
.Type
in PCD_DYNAMIC_EX_TYPE_SET
:
1322 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC_EX
))
1323 PcdCheckList
.append((Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC
))
1324 PcdTokenSpaceList
.append(Pcd
.TokenSpaceGuidCName
)
1325 GuidList
= OrderedDict(self
.GuidList
)
1326 for TokenSpace
in self
.GetGuidsUsedByPcd
:
1327 # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
1328 # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
1329 if TokenSpace
not in PcdTokenSpaceList
and TokenSpace
in GuidList
:
1330 GuidList
.pop(TokenSpace
)
1331 CheckList
= (GuidList
, self
.PpiList
, self
.ProtocolList
, PcdCheckList
)
1332 for Package
in self
.DerivedPackageList
:
1333 if Package
in Packages
:
1335 BeChecked
= (Package
.Guids
, Package
.Ppis
, Package
.Protocols
, Package
.Pcds
)
1337 for Index
in range(len(BeChecked
)):
1338 for Item
in CheckList
[Index
]:
1339 if Item
in BeChecked
[Index
]:
1340 Packages
.append(Package
)
1346 VfrPcds
= self
._GetPcdsMaybeUsedByVfr
()
1347 for Pkg
in self
.PlatformInfo
.PackageList
:
1350 for VfrPcd
in VfrPcds
:
1351 if ((VfrPcd
.TokenCName
, VfrPcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC_EX
) in Pkg
.Pcds
or
1352 (VfrPcd
.TokenCName
, VfrPcd
.TokenSpaceGuidCName
, TAB_PCDS_DYNAMIC
) in Pkg
.Pcds
):
1353 Packages
.append(Pkg
)
1356 ModuleType
= SUP_MODULE_DXE_DRIVER
if self
.ModuleType
== SUP_MODULE_UEFI_DRIVER
and self
.DepexGenerated
else self
.ModuleType
1357 DriverType
= self
.PcdIsDriver
if self
.PcdIsDriver
else ''
1359 MDefs
= self
.Module
.Defines
1362 'module_name' : self
.Name
,
1363 'module_guid' : Guid
,
1364 'module_module_type' : ModuleType
,
1365 'module_version_string' : [MDefs
['VERSION_STRING']] if 'VERSION_STRING' in MDefs
else [],
1366 'pcd_is_driver_string' : [],
1367 'module_uefi_specification_version' : [],
1368 'module_pi_specification_version' : [],
1369 'module_entry_point' : self
.Module
.ModuleEntryPointList
,
1370 'module_unload_image' : self
.Module
.ModuleUnloadImageList
,
1371 'module_constructor' : self
.Module
.ConstructorList
,
1372 'module_destructor' : self
.Module
.DestructorList
,
1373 'module_shadow' : [MDefs
['SHADOW']] if 'SHADOW' in MDefs
else [],
1374 'module_pci_vendor_id' : [MDefs
['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs
else [],
1375 'module_pci_device_id' : [MDefs
['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs
else [],
1376 'module_pci_class_code' : [MDefs
['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs
else [],
1377 'module_pci_revision' : [MDefs
['PCI_REVISION']] if 'PCI_REVISION' in MDefs
else [],
1378 'module_build_number' : [MDefs
['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs
else [],
1379 'module_spec' : [MDefs
['SPEC']] if 'SPEC' in MDefs
else [],
1380 'module_uefi_hii_resource_section' : [MDefs
['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs
else [],
1381 'module_uni_file' : [MDefs
['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs
else [],
1382 'module_arch' : self
.Arch
,
1383 'package_item' : [Package
.MetaFile
.File
.replace('\\', '/') for Package
in Packages
],
1385 'patchablepcd_item' : [],
1387 'protocol_item' : [],
1391 'libraryclasses_item' : []
1394 if 'MODULE_UNI_FILE' in MDefs
:
1395 UNIFile
= os
.path
.join(self
.MetaFile
.Dir
, MDefs
['MODULE_UNI_FILE'])
1396 if os
.path
.isfile(UNIFile
):
1397 shutil
.copy2(UNIFile
, self
.OutputDir
)
1399 if self
.AutoGenVersion
> int(gInfSpecVersion
, 0):
1400 AsBuiltInfDict
['module_inf_version'] = '0x%08x' % self
.AutoGenVersion
1402 AsBuiltInfDict
['module_inf_version'] = gInfSpecVersion
1405 AsBuiltInfDict
['pcd_is_driver_string'].append(DriverType
)
1407 if 'UEFI_SPECIFICATION_VERSION' in self
.Specification
:
1408 AsBuiltInfDict
['module_uefi_specification_version'].append(self
.Specification
['UEFI_SPECIFICATION_VERSION'])
1409 if 'PI_SPECIFICATION_VERSION' in self
.Specification
:
1410 AsBuiltInfDict
['module_pi_specification_version'].append(self
.Specification
['PI_SPECIFICATION_VERSION'])
1412 OutputDir
= self
.OutputDir
.replace('\\', '/').strip('/')
1413 DebugDir
= self
.DebugDir
.replace('\\', '/').strip('/')
1414 for Item
in self
.CodaTargetList
:
1415 File
= Item
.Target
.Path
.replace('\\', '/').strip('/').replace(DebugDir
, '').replace(OutputDir
, '').strip('/')
1416 if os
.path
.isabs(File
):
1417 File
= File
.replace('\\', '/').strip('/').replace(OutputDir
, '').strip('/')
1418 if Item
.Target
.Ext
.lower() == '.aml':
1419 AsBuiltInfDict
['binary_item'].append('ASL|' + File
)
1420 elif Item
.Target
.Ext
.lower() == '.acpi':
1421 AsBuiltInfDict
['binary_item'].append('ACPI|' + File
)
1422 elif Item
.Target
.Ext
.lower() == '.efi':
1423 AsBuiltInfDict
['binary_item'].append('PE32|' + self
.Name
+ '.efi')
1425 AsBuiltInfDict
['binary_item'].append('BIN|' + File
)
1426 if not self
.DepexGenerated
:
1427 DepexFile
= os
.path
.join(self
.OutputDir
, self
.Name
+ '.depex')
1428 if os
.path
.exists(DepexFile
):
1429 self
.DepexGenerated
= True
1430 if self
.DepexGenerated
:
1431 if self
.ModuleType
in [SUP_MODULE_PEIM
]:
1432 AsBuiltInfDict
['binary_item'].append('PEI_DEPEX|' + self
.Name
+ '.depex')
1433 elif self
.ModuleType
in [SUP_MODULE_DXE_DRIVER
, SUP_MODULE_DXE_RUNTIME_DRIVER
, SUP_MODULE_DXE_SAL_DRIVER
, SUP_MODULE_UEFI_DRIVER
]:
1434 AsBuiltInfDict
['binary_item'].append('DXE_DEPEX|' + self
.Name
+ '.depex')
1435 elif self
.ModuleType
in [SUP_MODULE_DXE_SMM_DRIVER
]:
1436 AsBuiltInfDict
['binary_item'].append('SMM_DEPEX|' + self
.Name
+ '.depex')
1438 Bin
= self
._GenOffsetBin
()
1440 AsBuiltInfDict
['binary_item'].append('BIN|%s' % Bin
)
1442 for Root
, Dirs
, Files
in os
.walk(OutputDir
):
1444 if File
.lower().endswith('.pdb'):
1445 AsBuiltInfDict
['binary_item'].append('DISPOSABLE|' + File
)
1446 HeaderComments
= self
.Module
.HeaderComments
1448 for Index
in range(len(HeaderComments
)):
1449 if HeaderComments
[Index
].find('@BinaryHeader') != -1:
1450 HeaderComments
[Index
] = HeaderComments
[Index
].replace('@BinaryHeader', '@file')
1453 AsBuiltInfDict
['header_comments'] = '\n'.join(HeaderComments
[StartPos
:]).replace(':#', '://')
1454 AsBuiltInfDict
['tail_comments'] = '\n'.join(self
.Module
.TailComments
)
1457 (self
.ProtocolList
, self
._ProtocolComments
, 'protocol_item'),
1458 (self
.PpiList
, self
._PpiComments
, 'ppi_item'),
1459 (GuidList
, self
._GuidComments
, 'guid_item')
1461 for Item
in GenList
:
1462 for CName
in Item
[0]:
1463 Comments
= '\n '.join(Item
[1][CName
]) if CName
in Item
[1] else ''
1464 Entry
= Comments
+ '\n ' + CName
if Comments
else CName
1465 AsBuiltInfDict
[Item
[2]].append(Entry
)
1466 PatchList
= parsePcdInfoFromMapFile(
1467 os
.path
.join(self
.OutputDir
, self
.Name
+ '.map'),
1468 os
.path
.join(self
.OutputDir
, self
.Name
+ '.efi')
1471 for Pcd
in PatchablePcds
:
1472 TokenCName
= Pcd
.TokenCName
1473 for PcdItem
in GlobalData
.MixedPcd
:
1474 if (Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
) in GlobalData
.MixedPcd
[PcdItem
]:
1475 TokenCName
= PcdItem
[0]
1477 for PatchPcd
in PatchList
:
1478 if TokenCName
== PatchPcd
[0]:
1483 if Pcd
.DatumType
== 'BOOLEAN':
1484 BoolValue
= Pcd
.DefaultValue
.upper()
1485 if BoolValue
== 'TRUE':
1486 Pcd
.DefaultValue
= '1'
1487 elif BoolValue
== 'FALSE':
1488 Pcd
.DefaultValue
= '0'
1490 if Pcd
.DatumType
in TAB_PCD_NUMERIC_TYPES
:
1491 HexFormat
= '0x%02x'
1492 if Pcd
.DatumType
== TAB_UINT16
:
1493 HexFormat
= '0x%04x'
1494 elif Pcd
.DatumType
== TAB_UINT32
:
1495 HexFormat
= '0x%08x'
1496 elif Pcd
.DatumType
== TAB_UINT64
:
1497 HexFormat
= '0x%016x'
1498 PcdValue
= HexFormat
% int(Pcd
.DefaultValue
, 0)
1500 if Pcd
.MaxDatumSize
is None or Pcd
.MaxDatumSize
== '':
1501 EdkLogger
.error("build", AUTOGEN_ERROR
,
1502 "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1504 ArraySize
= int(Pcd
.MaxDatumSize
, 0)
1505 PcdValue
= Pcd
.DefaultValue
1506 if PcdValue
[0] != '{':
1508 if PcdValue
[0] == 'L':
1510 PcdValue
= PcdValue
.lstrip('L')
1511 PcdValue
= eval(PcdValue
)
1513 for Index
in range(0, len(PcdValue
)):
1515 CharVal
= ord(PcdValue
[Index
])
1516 NewValue
= NewValue
+ '0x%02x' % (CharVal
& 0x00FF) + ', ' \
1517 + '0x%02x' % (CharVal
>> 8) + ', '
1519 NewValue
= NewValue
+ '0x%02x' % (ord(PcdValue
[Index
]) % 0x100) + ', '
1522 Padding
= Padding
* 2
1523 ArraySize
= ArraySize
// 2
1524 if ArraySize
< (len(PcdValue
) + 1):
1525 if Pcd
.MaxSizeUserSet
:
1526 EdkLogger
.error("build", AUTOGEN_ERROR
,
1527 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1530 ArraySize
= len(PcdValue
) + 1
1531 if ArraySize
> len(PcdValue
) + 1:
1532 NewValue
= NewValue
+ Padding
* (ArraySize
- len(PcdValue
) - 1)
1533 PcdValue
= NewValue
+ Padding
.strip().rstrip(',') + '}'
1534 elif len(PcdValue
.split(',')) <= ArraySize
:
1535 PcdValue
= PcdValue
.rstrip('}') + ', 0x00' * (ArraySize
- len(PcdValue
.split(',')))
1538 if Pcd
.MaxSizeUserSet
:
1539 EdkLogger
.error("build", AUTOGEN_ERROR
,
1540 "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd
.TokenSpaceGuidCName
, TokenCName
)
1543 ArraySize
= len(PcdValue
) + 1
1544 PcdItem
= '%s.%s|%s|0x%X' % \
1545 (Pcd
.TokenSpaceGuidCName
, TokenCName
, PcdValue
, PatchPcd
[1])
1547 if (Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
) in self
._PcdComments
:
1548 PcdComments
= '\n '.join(self
._PcdComments
[Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
])
1550 PcdItem
= PcdComments
+ '\n ' + PcdItem
1551 AsBuiltInfDict
['patchablepcd_item'].append(PcdItem
)
1553 for Pcd
in Pcds
+ VfrPcds
:
1556 TokenCName
= Pcd
.TokenCName
1557 for PcdItem
in GlobalData
.MixedPcd
:
1558 if (Pcd
.TokenCName
, Pcd
.TokenSpaceGuidCName
) in GlobalData
.MixedPcd
[PcdItem
]:
1559 TokenCName
= PcdItem
[0]
1561 if Pcd
.Type
== TAB_PCDS_DYNAMIC_EX_HII
:
1562 for SkuName
in Pcd
.SkuInfoList
:
1563 SkuInfo
= Pcd
.SkuInfoList
[SkuName
]
1564 HiiInfo
= '## %s|%s|%s' % (SkuInfo
.VariableName
, SkuInfo
.VariableGuid
, SkuInfo
.VariableOffset
)
1566 if (Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
) in self
._PcdComments
:
1567 PcdCommentList
= self
._PcdComments
[Pcd
.TokenSpaceGuidCName
, Pcd
.TokenCName
][:]
1571 for Index
, Comment
in enumerate(PcdCommentList
):
1572 for Usage
in UsageList
:
1573 if Comment
.find(Usage
) != -1:
1577 if UsageIndex
!= -1:
1578 PcdCommentList
[UsageIndex
] = '## %s %s %s' % (UsageStr
, HiiInfo
, PcdCommentList
[UsageIndex
].replace(UsageStr
, ''))
1580 PcdCommentList
.append('## UNDEFINED ' + HiiInfo
)
1581 PcdComments
= '\n '.join(PcdCommentList
)
1582 PcdEntry
= Pcd
.TokenSpaceGuidCName
+ '.' + TokenCName
1584 PcdEntry
= PcdComments
+ '\n ' + PcdEntry
1585 AsBuiltInfDict
['pcd_item'].append(PcdEntry
)
1586 for Item
in self
.BuildOption
:
1587 if 'FLAGS' in self
.BuildOption
[Item
]:
1588 AsBuiltInfDict
['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self
.ToolChainFamily
, self
.BuildTarget
, self
.ToolChain
, self
.Arch
, Item
, self
.BuildOption
[Item
]['FLAGS'].strip()))
1590 # Generated LibraryClasses section in comments.
1591 for Library
in self
.LibraryAutoGenList
:
1592 AsBuiltInfDict
['libraryclasses_item'].append(Library
.MetaFile
.File
.replace('\\', '/'))
1594 # Generated UserExtensions TianoCore section.
1595 # All tianocore user extensions are copied.
1597 for TianoCore
in self
._GetTianoCoreUserExtensionList
():
1598 UserExtStr
+= '\n'.join(TianoCore
)
1599 ExtensionFile
= os
.path
.join(self
.MetaFile
.Dir
, TianoCore
[1])
1600 if os
.path
.isfile(ExtensionFile
):
1601 shutil
.copy2(ExtensionFile
, self
.OutputDir
)
1602 AsBuiltInfDict
['userextension_tianocore_item'] = UserExtStr
1604 # Generated depex expression section in comments.
1605 DepexExpression
= self
._GetDepexExpresionString
()
1606 AsBuiltInfDict
['depexsection_item'] = DepexExpression
if DepexExpression
else ''
1608 AsBuiltInf
= TemplateString()
1609 AsBuiltInf
.Append(gAsBuiltInfHeaderString
.Replace(AsBuiltInfDict
))
1611 SaveFileOnChange(os
.path
.join(self
.OutputDir
, self
.Name
+ '.inf'), str(AsBuiltInf
), False)
1613 self
.IsAsBuiltInfCreated
= True
def CacheCopyFile(self, OriginDir, CopyDir, File):
    """Copy File (located somewhere under CopyDir) into OriginDir,
    preserving its path relative to CopyDir.

    Cache population is best-effort: a failed copy is reported as a
    cache warning and the build continues.

    @param OriginDir  destination root directory
    @param CopyDir    source root directory File is relative to
    @param File       absolute path of the file to copy
    """
    sub_dir = os.path.relpath(File, CopyDir)
    destination_file = os.path.join(OriginDir, sub_dir)
    destination_dir = os.path.dirname(destination_file)
    CreateDirectory(destination_dir)
    try:
        CopyFileOnChange(File, destination_dir)
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # still propagate; copy failure only degrades the cache.
        EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))
        return
1626 def CopyModuleToCache(self
):
1627 self
.GenPreMakefileHash(GlobalData
.gCacheIR
)
1628 if not (self
.MetaFile
.Path
, self
.Arch
) in GlobalData
.gCacheIR
or \
1629 not GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
:
1630 EdkLogger
.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1633 self
.GenMakeHash(GlobalData
.gCacheIR
)
1634 if not (self
.MetaFile
.Path
, self
.Arch
) in GlobalData
.gCacheIR
or \
1635 not GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
or \
1636 not GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
:
1637 EdkLogger
.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1640 MakeHashStr
= str(GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
)
1641 FileDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
, MakeHashStr
)
1642 FfsDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, TAB_FV_DIRECTORY
, "Ffs", self
.Guid
+ self
.Name
, MakeHashStr
)
1644 CreateDirectory (FileDir
)
1645 self
.SaveHashChainFileToCache(GlobalData
.gCacheIR
)
1646 ModuleFile
= path
.join(self
.OutputDir
, self
.Name
+ '.inf')
1647 if os
.path
.exists(ModuleFile
):
1648 CopyFileOnChange(ModuleFile
, FileDir
)
1649 if not self
.OutputFile
:
1650 Ma
= self
.BuildDatabase
[self
.MetaFile
, self
.Arch
, self
.BuildTarget
, self
.ToolChain
]
1651 self
.OutputFile
= Ma
.Binaries
1652 for File
in self
.OutputFile
:
1654 if not os
.path
.isabs(File
):
1655 File
= os
.path
.join(self
.OutputDir
, File
)
1656 if os
.path
.exists(File
):
1657 sub_dir
= os
.path
.relpath(File
, self
.OutputDir
)
1658 destination_file
= os
.path
.join(FileDir
, sub_dir
)
1659 destination_dir
= os
.path
.dirname(destination_file
)
1660 CreateDirectory(destination_dir
)
1661 CopyFileOnChange(File
, destination_dir
)
1663 def SaveHashChainFileToCache(self
, gDict
):
1664 if not GlobalData
.gBinCacheDest
:
1667 self
.GenPreMakefileHash(gDict
)
1668 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
1669 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
:
1670 EdkLogger
.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1673 self
.GenMakeHash(gDict
)
1674 if not (self
.MetaFile
.Path
, self
.Arch
) in gDict
or \
1675 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
or \
1676 not gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
:
1677 EdkLogger
.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self
.MetaFile
.Path
, self
.Arch
))
1680 # save the hash chain list as cache file
1681 MakeHashStr
= str(GlobalData
.gCacheIR
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
)
1682 CacheDestDir
= path
.join(GlobalData
.gBinCacheDest
, self
.PlatformInfo
.OutputDir
, self
.BuildTarget
+ "_" + self
.ToolChain
, self
.Arch
, self
.SourceDir
, self
.MetaFile
.BaseName
)
1683 CacheHashDestDir
= path
.join(CacheDestDir
, MakeHashStr
)
1684 ModuleHashPair
= path
.join(CacheDestDir
, self
.Name
+ ".ModuleHashPair")
1685 MakeHashChain
= path
.join(CacheHashDestDir
, self
.Name
+ ".MakeHashChain")
1686 ModuleFilesChain
= path
.join(CacheHashDestDir
, self
.Name
+ ".ModuleFilesChain")
1688 # save the HashChainDict as json file
1689 CreateDirectory (CacheDestDir
)
1690 CreateDirectory (CacheHashDestDir
)
1692 ModuleHashPairList
= [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
1693 if os
.path
.exists(ModuleHashPair
):
1694 f
= open(ModuleHashPair
, 'r')
1695 ModuleHashPairList
= json
.load(f
)
1697 PreMakeHash
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].PreMakefileHashHexDigest
1698 MakeHash
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashHexDigest
1699 ModuleHashPairList
.append((PreMakeHash
, MakeHash
))
1700 ModuleHashPairList
= list(set(map(tuple, ModuleHashPairList
)))
1701 with
open(ModuleHashPair
, 'w') as f
:
1702 json
.dump(ModuleHashPairList
, f
, indent
=2)
1704 EdkLogger
.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair
)
1708 with
open(MakeHashChain
, 'w') as f
:
1709 json
.dump(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakeHashChain
, f
, indent
=2)
1711 EdkLogger
.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain
)
1715 with
open(ModuleFilesChain
, 'w') as f
:
1716 json
.dump(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].ModuleFilesChain
, f
, indent
=2)
1718 EdkLogger
.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain
)
1721 # save the autogenfile and makefile for debug usage
1722 CacheDebugDir
= path
.join(CacheHashDestDir
, "CacheDebug")
1723 CreateDirectory (CacheDebugDir
)
1724 CopyFileOnChange(gDict
[(self
.MetaFile
.Path
, self
.Arch
)].MakefilePath
, CacheDebugDir
)
1725 if gDict
[(self
.MetaFile
.Path
, self
.Arch
)].AutoGenFileList
:
1726 for File
in gDict
[(self
.MetaFile
.Path
, self
.Arch
)].AutoGenFileList
:
1727 CopyFileOnChange(str(File
), CacheDebugDir
)
1731 ## Create makefile for the module and its dependent libraries
1733 # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of
1734 # dependent libraries will be created
1736 @cached_class_function
1737 def CreateMakeFile(self
, CreateLibraryMakeFile
=True, GenFfsList
= []):
1738 gDict
= GlobalData
.gCacheIR
1739 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
1740 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateMakeFileDone
:
1743 # nest this function inside it's only caller.
1744 def CreateTimeStamp():
1745 FileSet
= {self
.MetaFile
.Path
}
1747 for SourceFile
in self
.Module
.Sources
:
1748 FileSet
.add (SourceFile
.Path
)
1750 for Lib
in self
.DependentLibraryList
:
1751 FileSet
.add (Lib
.MetaFile
.Path
)
1753 for f
in self
.AutoGenDepSet
:
1754 FileSet
.add (f
.Path
)
1756 if os
.path
.exists (self
.TimeStampPath
):
1757 os
.remove (self
.TimeStampPath
)
1758 with
open(self
.TimeStampPath
, 'w+') as fd
:
1763 # Ignore generating makefile when it is a binary module
1764 if self
.IsBinaryModule
:
1767 self
.GenFfsList
= GenFfsList
1769 if not self
.IsLibrary
and CreateLibraryMakeFile
:
1770 for LibraryAutoGen
in self
.LibraryAutoGenList
:
1771 LibraryAutoGen
.CreateMakeFile()
1773 # CanSkip uses timestamps to determine build skipping
1777 if len(self
.CustomMakefile
) == 0:
1778 Makefile
= GenMake
.ModuleMakefile(self
)
1780 Makefile
= GenMake
.CustomMakefile(self
)
1781 if Makefile
.Generate():
1782 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated makefile for module %s [%s]" %
1783 (self
.Name
, self
.Arch
))
1785 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Skipped the generation of makefile for module %s [%s]" %
1786 (self
.Name
, self
.Arch
))
1790 MakefileType
= Makefile
._FileType
1791 MakefileName
= Makefile
._FILE
_NAME
_[MakefileType
]
1792 MakefilePath
= os
.path
.join(self
.MakeFileDir
, MakefileName
)
1794 MewIR
= ModuleBuildCacheIR(self
.MetaFile
.Path
, self
.Arch
)
1795 MewIR
.MakefilePath
= MakefilePath
1796 MewIR
.DependencyHeaderFileSet
= Makefile
.DependencyHeaderFileSet
1797 MewIR
.CreateMakeFileDone
= True
1798 with GlobalData
.file_lock
:
1800 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
1801 IR
.MakefilePath
= MakefilePath
1802 IR
.DependencyHeaderFileSet
= Makefile
.DependencyHeaderFileSet
1803 IR
.CreateMakeFileDone
= True
1804 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
1806 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = MewIR
def CopyBinaryFiles(self):
    """Copy every prebuilt binary declared by the module into the module
    output directory (flattened to the file's base name)."""
    for BinaryFile in self.Module.Binaries:
        # NOTE(review): the SrcPath assignment is elided in this view;
        # presumed to be the binary file's own path — confirm upstream.
        SrcPath = BinaryFile.Path
        DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))
        CopyLongFilePath(SrcPath, DstPath)
1813 ## Create autogen code for the module and its dependent libraries
1815 # @param CreateLibraryCodeFile Flag indicating if or not the code of
1816 # dependent libraries will be created
1818 def CreateCodeFile(self
, CreateLibraryCodeFile
=True):
1819 gDict
= GlobalData
.gCacheIR
1820 if (self
.MetaFile
.Path
, self
.Arch
) in gDict
and \
1821 gDict
[(self
.MetaFile
.Path
, self
.Arch
)].CreateCodeFileDone
:
1824 if self
.IsCodeFileCreated
:
1827 # Need to generate PcdDatabase even PcdDriver is binarymodule
1828 if self
.IsBinaryModule
and self
.PcdIsDriver
!= '':
1829 CreatePcdDatabaseCode(self
, TemplateString(), TemplateString())
1831 if self
.IsBinaryModule
:
1833 self
.CopyBinaryFiles()
1836 if not self
.IsLibrary
and CreateLibraryCodeFile
:
1837 for LibraryAutoGen
in self
.LibraryAutoGenList
:
1838 LibraryAutoGen
.CreateCodeFile()
1840 # CanSkip uses timestamps to determine build skipping
1845 IgoredAutoGenList
= []
1847 for File
in self
.AutoGenFileList
:
1848 if GenC
.Generate(File
.Path
, self
.AutoGenFileList
[File
], File
.IsBinary
):
1849 AutoGenList
.append(str(File
))
1851 IgoredAutoGenList
.append(str(File
))
1854 for ModuleType
in self
.DepexList
:
1855 # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
1856 if len(self
.DepexList
[ModuleType
]) == 0 or ModuleType
== SUP_MODULE_USER_DEFINED
or ModuleType
== SUP_MODULE_HOST_APPLICATION
:
1859 Dpx
= GenDepex
.DependencyExpression(self
.DepexList
[ModuleType
], ModuleType
, True)
1860 DpxFile
= gAutoGenDepexFileName
% {"module_name" : self
.Name
}
1862 if len(Dpx
.PostfixNotation
) != 0:
1863 self
.DepexGenerated
= True
1865 if Dpx
.Generate(path
.join(self
.OutputDir
, DpxFile
)):
1866 AutoGenList
.append(str(DpxFile
))
1868 IgoredAutoGenList
.append(str(DpxFile
))
1870 if IgoredAutoGenList
== []:
1871 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated [%s] files for module %s [%s]" %
1872 (" ".join(AutoGenList
), self
.Name
, self
.Arch
))
1873 elif AutoGenList
== []:
1874 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Skipped the generation of [%s] files for module %s [%s]" %
1875 (" ".join(IgoredAutoGenList
), self
.Name
, self
.Arch
))
1877 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Generated [%s] (skipped %s) files for module %s [%s]" %
1878 (" ".join(AutoGenList
), " ".join(IgoredAutoGenList
), self
.Name
, self
.Arch
))
1880 self
.IsCodeFileCreated
= True
1881 MewIR
= ModuleBuildCacheIR(self
.MetaFile
.Path
, self
.Arch
)
1882 MewIR
.CreateCodeFileDone
= True
1883 with GlobalData
.file_lock
:
1885 IR
= gDict
[(self
.MetaFile
.Path
, self
.Arch
)]
1886 IR
.CreateCodeFileDone
= True
1887 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = IR
1889 gDict
[(self
.MetaFile
.Path
, self
.Arch
)] = MewIR
1893 ## Summarize the ModuleAutoGen objects of all libraries used by this module
1895 def LibraryAutoGenList(self
):
1897 for Library
in self
.DependentLibraryList
:
1904 self
.PlatformInfo
.MetaFile
,
1908 if La
not in RetVal
:
1910 for Lib
in La
.CodaTargetList
:
1911 self
._ApplyBuildRule
(Lib
.Target
, TAB_UNKNOWN_FILE
)
def GenModuleHash(self):
    """Compute and cache a hash string for this module.

    The hash accumulates (in order) the platform hash, the hashes of all
    dependent packages, the hashes of all dependent libraries, the module
    meta file and the module source files.  The hex digest is stored in
    GlobalData.gModuleHash[self.Arch][self.Name] and returned UTF-8 encoded.

    NOTE(review): several lines are elided from this listing (gaps in the
    embedded numbering), including the creation of the hash object 'm'
    (presumably hashlib.md5()) and the read/close/update lines for the
    opened files — confirm against the full source.
    """
    # Initialize a dictionary for each arch type
    if self.Arch not in GlobalData.gModuleHash:
        GlobalData.gModuleHash[self.Arch] = {}
    # Early exit if module or library has been hashed and is in memory
    if self.Name in GlobalData.gModuleHash[self.Arch]:
        return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
    # Initialize hash object
    # NOTE(review): hash-object construction elided from this listing.
    # Add Platform level hash
    m.update(GlobalData.gPlatformHash.encode('utf-8'))
    # Add Package level hash (sorted for a deterministic digest)
    if self.DependentPackageList:
        for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
            if Pkg.PackageName in GlobalData.gPackageHash:
                m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))
    # Add Library level hash (sorted by library name for determinism)
    if self.LibraryAutoGenList:
        for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
            if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
                # NOTE(review): body of this 'if' is elided — presumably a
                # recursive Lib.GenModuleHash() call; TODO confirm.
            m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
    # Hash the module meta file itself.
    f = open(str(self.MetaFile), 'rb')
    # NOTE(review): read/close/m.update lines elided from this listing.
    # Add Module's source files (sorted by path string for determinism)
    if self.SourceFileList:
        for File in sorted(self.SourceFileList, key=lambda x: str(x)):
            f = open(str(File), 'rb')
            # NOTE(review): read/close/m.update lines elided here too.
    # Memoize the hex digest so later calls take the early-exit path above.
    GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()
    return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')
def GenModuleFilesHash(self, gDict):
    """Hash this module's own files (meta file, sources, included headers)
    and record the result in the shared cache dict.

    gDict maps (MetaFile.Path, Arch) -> ModuleBuildCacheIR.  On success the
    entry's ModuleFilesHashDigest / ModuleFilesHashHexDigest /
    ModuleFilesChain fields are filled in and the entry is returned.

    NOTE(review): this listing is elided (gaps in the embedded numbering):
    list/hash-object initializations, 'continue' lines, file read/close and
    a try/except around the locked update are not visible — confirm against
    the full source.
    """
    # Early exit if module or library has been hashed and is in memory
    if (self.MetaFile.Path, self.Arch) in gDict:
        if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:
            return gDict[(self.MetaFile.Path, self.Arch)]
    DependencyFileSet = set()
    # Add Module Meta file
    DependencyFileSet.add(self.MetaFile)
    # Add Module's source files
    if self.SourceFileList:
        for File in set(self.SourceFileList):
            DependencyFileSet.add(File)
    # Add module's include header files
    # Search dependency file list for each source file
    # NOTE(review): initialization of SourceFileList/OutPutFileList elided.
    for Target in self.IntroTargetList:
        SourceFileList.extend(Target.Inputs)
        OutPutFileList.extend(Target.Outputs)
    # Drop generated outputs from the source list — they are products, not
    # inputs, of this module.
    for Item in OutPutFileList:
        if Item in SourceFileList:
            SourceFileList.remove(Item)
    # NOTE(review): initialization of SearchList elided from this listing.
    for file_path in self.IncludePathList + self.BuildOptionIncPathList:
        # skip the folders in platform BuildDir which have not been generated yet
        if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):
            # NOTE(review): a line (presumably 'continue') is elided here.
        SearchList.append(file_path)
    FileDependencyDict = {}
    ForceIncludedFile = []
    for F in SourceFileList:
        # skip the files which have not been generated yet, because
        # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c
        if not os.path.exists(F.Path):
            # NOTE(review): a line (presumably 'continue') is elided here.
        FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)
    if FileDependencyDict:
        for Dependency in FileDependencyDict.values():
            DependencyFileSet.update(set(Dependency))
    # Calculate all above dependency files hash
    # Initialize hash object
    # NOTE(review): hash object 'm' and 'FileList' initialization elided.
    for File in sorted(DependencyFileSet, key=lambda x: str(x)):
        if not os.path.exists(str(File)):
            EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
            # NOTE(review): a line (presumably 'continue') is elided here.
        f = open(str(File), 'rb')
        # NOTE(review): read/close/m.update lines elided from this listing.
        FileList.append((str(File), hashlib.md5(Content).hexdigest()))
    # Build a fresh IR as fallback, then update the shared entry under lock.
    MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)
    MewIR.ModuleFilesHashDigest = m.digest()
    MewIR.ModuleFilesHashHexDigest = m.hexdigest()
    MewIR.ModuleFilesChain = FileList
    with GlobalData.file_lock:
        # NOTE(review): a try: line appears to be elided before this block.
        IR = gDict[(self.MetaFile.Path, self.Arch)]
        IR.ModuleFilesHashDigest = m.digest()
        IR.ModuleFilesHashHexDigest = m.hexdigest()
        IR.ModuleFilesChain = FileList
        gDict[(self.MetaFile.Path, self.Arch)] = IR
        # NOTE(review): an except: line appears to be elided before the
        # fallback assignment below — confirm against the full source.
        gDict[(self.MetaFile.Path, self.Arch)] = MewIR
    return gDict[(self.MetaFile.Path, self.Arch)]
def GenPreMakefileHash(self, gDict):
    """Compute the pre-makefile hash for this module and store it in gDict.

    Accumulates (in order) the platform hash, package hashes, dependent
    library module-file digests and this module's own module-file digest,
    then records the hex digest as PreMakefileHashHexDigest on the cache
    entry and returns the entry.

    NOTE(review): 'return' lines, 'else:' lines and the hash-object
    construction are elided from this listing (gaps in the embedded
    numbering) — confirm against the full source.
    """
    # Early exit if module or library has been hashed and is in memory
    if (self.MetaFile.Path, self.Arch) in gDict and \
       gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
        return gDict[(self.MetaFile.Path, self.Arch)]
    # skip binary module
    if self.IsBinaryModule:
        # NOTE(review): body elided — presumably a 'return'.
    # Make sure the module-files digest exists before combining it below.
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
        self.GenModuleFilesHash(gDict)
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
        EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))
        # NOTE(review): a 'return' line is elided here.
    # Initialize hash object
    # NOTE(review): hash-object construction elided from this listing.
    # Add Platform level hash
    if ('PlatformHash') in gDict:
        m.update(gDict[('PlatformHash')].encode('utf-8'))
        # NOTE(review): an 'else:' line is elided before the warning below.
        EdkLogger.quiet("[cache warning]: PlatformHash is missing")
    # Add Package level hash (sorted for a deterministic digest)
    if self.DependentPackageList:
        for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
            if (Pkg.PackageName, 'PackageHash') in gDict:
                m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))
                # NOTE(review): an 'else:' line is elided before the warning.
                EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg.PackageName, self.MetaFile.Name, self.Arch))
    # Add Library level hash (sorted by name; recurse if a library's digest
    # has not been generated yet).
    if self.LibraryAutoGenList:
        for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
            if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
               not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:
                Lib.GenPreMakefileHash(gDict)
            m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)
    # Add this module's own files digest.
    m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)
    # Publish the result on the shared cache entry under the file lock.
    with GlobalData.file_lock:
        IR = gDict[(self.MetaFile.Path, self.Arch)]
        IR.PreMakefileHashHexDigest = m.hexdigest()
        gDict[(self.MetaFile.Path, self.Arch)] = IR
    return gDict[(self.MetaFile.Path, self.Arch)]
def GenMakeHeaderFilesHash(self, gDict):
    """Hash the makefile, dependency headers and AutoGen files of this module.

    Ensures the AutoGen code file and makefile have been created first (so
    their paths exist in the cache entry), then hashes the makefile path,
    every dependency header and every AutoGen file, storing the digest and
    per-file chain on the gDict entry, which is returned.

    NOTE(review): 'return'/'continue' lines, 'else:' lines, FileList/hash
    initialization and file read/close lines are elided from this listing —
    confirm against the full source.
    """
    # Early exit if module or library has been hashed and is in memory
    if (self.MetaFile.Path, self.Arch) in gDict and \
       gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
        return gDict[(self.MetaFile.Path, self.Arch)]
    # skip binary module
    if self.IsBinaryModule:
        # NOTE(review): body elided — presumably a 'return'.
    # Create the AutoGen code file if it has not been done for this module,
    # restoring any per-module const-PCD / referencing-module state first.
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:
        if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.libConstPcd:
            self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]
        if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.Refes:
            self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]
        self.CreateCodeFile()
    # Create the makefile if it has not been done for this module.
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
        self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.File, self.Arch),[]))
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \
       not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:
        EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self.MetaFile.Path, self.Arch))
        # NOTE(review): a 'return' line is elided here.
    DependencyFileSet = set()
    # Add the makefile path recorded on the cache entry.
    if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:
        DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)
        # NOTE(review): an 'else:' line is elided before the warning below.
        EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
    # Add every dependency header recorded on the cache entry.
    if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:
        for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:
            DependencyFileSet.add(File)
        # NOTE(review): an 'else:' line is elided before the warning below.
        EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self.MetaFile.Path, self.Arch))
    # Add the generated AutoGen files.
    if self.AutoGenFileList:
        for File in set(self.AutoGenFileList):
            DependencyFileSet.add(File)
    # Calculate all above dependency files hash
    # Initialize hash object
    # NOTE(review): 'FileList' and hash object 'm' initialization elided.
    for File in sorted(DependencyFileSet, key=lambda x: str(x)):
        if not os.path.exists(str(File)):
            EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
            # NOTE(review): a line (presumably 'continue') is elided here.
        f = open(str(File), 'rb')
        # NOTE(review): read/close/m.update lines elided from this listing.
        FileList.append((str(File), hashlib.md5(Content).hexdigest()))
    # Publish digest and chain on the shared cache entry under the file lock.
    with GlobalData.file_lock:
        IR = gDict[(self.MetaFile.Path, self.Arch)]
        IR.AutoGenFileList = self.AutoGenFileList.keys()
        IR.MakeHeaderFilesHashChain = FileList
        IR.MakeHeaderFilesHashDigest = m.digest()
        gDict[(self.MetaFile.Path, self.Arch)] = IR
    return gDict[(self.MetaFile.Path, self.Arch)]
def GenMakeHash(self, gDict):
    """Compute the overall make hash and its file chain for this module.

    Combines the make-header digest, every dependent library's make hash
    (recursing as needed) and this module's module-files digest; the
    MakeHashChain accumulates the de-duplicated, sorted per-file entries.
    Results are stored on the gDict entry, which is returned.

    NOTE(review): 'return'/'continue' lines and the initialization of the
    hash object 'm' and of 'MakeHashChain' are elided from this listing —
    confirm against the full source.
    """
    # Early exit if module or library has been hashed and is in memory
    if (self.MetaFile.Path, self.Arch) in gDict and \
       gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
        return gDict[(self.MetaFile.Path, self.Arch)]
    # skip binary module
    if self.IsBinaryModule:
        # NOTE(review): body elided — presumably a 'return'.
    # Make sure the prerequisite digests exist.
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:
        self.GenModuleFilesHash(gDict)
    if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:
        self.GenMakeHeaderFilesHash(gDict)
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \
       not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \
       not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \
       not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:
        EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self.MetaFile.Path, self.Arch))
        # NOTE(review): a 'return' line is elided here.
    # Initialize hash object
    # NOTE(review): 'm' and 'MakeHashChain' initialization elided.
    # Add hash of makefile and dependency header files
    m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)
    # Append only chain entries not seen yet, sorted for determinism.
    New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))
    New.sort(key=lambda x: str(x))
    MakeHashChain += New
    # Add dependent libraries' make hashes (recursing when missing).
    if self.LibraryAutoGenList:
        for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
            if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \
               not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:
                Lib.GenMakeHash(gDict)
            if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:
                print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)
                # NOTE(review): a line (presumably 'continue') is elided.
            m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)
            New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))
            New.sort(key=lambda x: str(x))
            MakeHashChain += New
    # Add this module's own files digest and chain.
    m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)
    New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))
    New.sort(key=lambda x: str(x))
    MakeHashChain += New
    # Publish the results on the shared cache entry under the file lock.
    with GlobalData.file_lock:
        IR = gDict[(self.MetaFile.Path, self.Arch)]
        IR.MakeHashDigest = m.digest()
        IR.MakeHashHexDigest = m.hexdigest()
        IR.MakeHashChain = MakeHashChain
        gDict[(self.MetaFile.Path, self.Arch)] = IR
    return gDict[(self.MetaFile.Path, self.Arch)]
## Decide whether we can skip the left autogen and make process
def CanSkipbyPreMakefileCache(self, gDict):
    """Try to satisfy this module's build from the binary cache using the
    pre-makefile hash; on a hit, restore cached outputs and mark the entry.

    NOTE(review): many 'return' lines, 'try:'/'except:' lines and inner
    loop headers ('for f in files:') are elided from this listing (gaps in
    the embedded numbering) — confirm against the full source.
    """
    if not GlobalData.gBinCacheSource:
        # NOTE(review): body elided — presumably 'return False'.
    # If Module is binary, do not skip by cache
    if self.IsBinaryModule:
        # NOTE(review): body elided — presumably 'return False'.
    # .inc contains binary information so do not skip by hash as well
    for f_ext in self.SourceFileList:
        if '.inc' in str(f_ext):
            # NOTE(review): body elided — presumably 'return False'.
    # Get the module hash values from stored cache and current build
    # then check whether cache hit based on the hash values
    # if cache hit, restore all the files from cache
    FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
    if not os.path.exists(ModuleHashPair):
        EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
        # NOTE(review): a 'return' line is elided here.
    # NOTE(review): a 'try:' line appears to be elided before the load below.
    f = open(ModuleHashPair, 'r')
    ModuleHashPairList = json.load(f)
    # NOTE(review): f.close() and the 'except:' line are elided before the
    # warning below — confirm against the full source.
    EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
    self.GenPreMakefileHash(gDict)
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:
        EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
        # NOTE(review): a 'return' line is elided here.
    # Look for a stored pair whose pre-makefile hash matches the current one.
    CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest
    for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
        if PreMakefileHash == CurrentPreMakeHash:
            MakeHashStr = str(MakeHash)
    # NOTE(review): the no-match early-return lines are elided here.
    TargetHashDir = path.join(FileDir, MakeHashStr)
    TargetFfsHashDir = path.join(FfsDir, MakeHashStr)
    if not os.path.exists(TargetHashDir):
        EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
        # NOTE(review): a 'return' line is elided here.
    # Restore the cached module outputs into the build output directory.
    for root, dir, files in os.walk(TargetHashDir):
        # NOTE(review): a 'for f in files:' line is elided here.
        File = path.join(root, f)
        self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
    # Restore the cached FFS outputs if present.
    if os.path.exists(TargetFfsHashDir):
        for root, dir, files in os.walk(TargetFfsHashDir):
            # NOTE(review): a 'for f in files:' line is elided here.
            File = path.join(root, f)
            self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)
    # The PCD driver modules additionally need their database code recreated.
    if self.Name == "PcdPeim" or self.Name == "PcdDxe":
        CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
    # Record the cache hit on the shared entry under the file lock.
    with GlobalData.file_lock:
        IR = gDict[(self.MetaFile.Path, self.Arch)]
        IR.PreMakeCacheHit = True
        gDict[(self.MetaFile.Path, self.Arch)] = IR
    print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)
    #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))
    # NOTE(review): a final 'return True' line appears to be elided.
## Decide whether we can skip the make process
def CanSkipbyMakeCache(self, gDict):
    """Try to satisfy this module's make step from the binary cache using
    the full make hash; on a hit, restore cached outputs and mark the entry.

    NOTE(review): 'return' lines, 'try:'/'except:' lines and inner loop
    headers are elided from this listing (gaps in the embedded numbering) —
    confirm against the full source.
    """
    if not GlobalData.gBinCacheSource:
        # NOTE(review): body elided — presumably 'return False'.
    # If Module is binary, do not skip by cache
    if self.IsBinaryModule:
        print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)
        # NOTE(review): a 'return' line is elided here.
    # .inc contains binary information so do not skip by hash as well
    for f_ext in self.SourceFileList:
        if '.inc' in str(f_ext):
            # Record an explicit miss for .inc-bearing modules.
            with GlobalData.file_lock:
                IR = gDict[(self.MetaFile.Path, self.Arch)]
                IR.MakeCacheHit = False
                gDict[(self.MetaFile.Path, self.Arch)] = IR
            print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)
            # NOTE(review): a 'return' line is elided here.
    # Get the module hash values from stored cache and current build
    # then check whether cache hit based on the hash values
    # if cache hit, restore all the files from cache
    FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
    if not os.path.exists(ModuleHashPair):
        EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)
        # NOTE(review): a 'return' line is elided here.
    # NOTE(review): a 'try:' line appears to be elided before the load below.
    f = open(ModuleHashPair, 'r')
    ModuleHashPairList = json.load(f)
    # NOTE(review): f.close() and the 'except:' line are elided before the
    # warning below — confirm against the full source.
    EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
    self.GenMakeHash(gDict)
    if not (self.MetaFile.Path, self.Arch) in gDict or \
       not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:
        EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))
        # NOTE(review): a 'return' line is elided here.
    # Look for a stored pair whose make hash matches the current one.
    CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest
    for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
        if MakeHash == CurrentMakeHash:
            MakeHashStr = str(MakeHash)
    # NOTE(review): an 'if not MakeHashStr:' guard appears to be elided
    # before the miss report below — confirm against the full source.
    print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
    # NOTE(review): a 'return' line is elided here.
    TargetHashDir = path.join(FileDir, MakeHashStr)
    TargetFfsHashDir = path.join(FfsDir, MakeHashStr)
    if not os.path.exists(TargetHashDir):
        EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)
        # NOTE(review): a 'return' line is elided here.
    # Restore the cached module outputs into the build output directory.
    for root, dir, files in os.walk(TargetHashDir):
        # NOTE(review): a 'for f in files:' line is elided here.
        File = path.join(root, f)
        self.CacheCopyFile(self.OutputDir, TargetHashDir, File)
    # Restore the cached FFS outputs if present.
    if os.path.exists(TargetFfsHashDir):
        for root, dir, files in os.walk(TargetFfsHashDir):
            # NOTE(review): a 'for f in files:' line is elided here.
            File = path.join(root, f)
            self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)
    # The PCD driver modules additionally need their database code recreated.
    if self.Name == "PcdPeim" or self.Name == "PcdDxe":
        CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
    # Record the cache hit on the shared entry under the file lock.
    with GlobalData.file_lock:
        IR = gDict[(self.MetaFile.Path, self.Arch)]
        IR.MakeCacheHit = True
        gDict[(self.MetaFile.Path, self.Arch)] = IR
    print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)
    # NOTE(review): a final 'return True' line appears to be elided.
## Show the first file name which causes cache miss
def PrintFirstMakeCacheMissFile(self, gDict):
    """Diagnostic helper: compare this module's current MakeHashChain with
    the cached chain and report the first differing / first miss file.

    NOTE(review): 'return'/'break' lines, 'try:'/'except:'/'else:' lines
    and the MakeHashSet initialization are elided from this listing (gaps
    in the embedded numbering) — confirm against the full source.
    """
    if not GlobalData.gBinCacheSource:
        # NOTE(review): body elided — presumably a 'return'.
    # skip binary module
    if self.IsBinaryModule:
        # NOTE(review): body elided — presumably a 'return'.
    if not (self.MetaFile.Path, self.Arch) in gDict:
        # NOTE(review): body elided — presumably a 'return'.
    # Only print cache miss file for the MakeCache not hit module
    if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
        # NOTE(review): body elided — presumably a 'return'.
    if not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:
        EdkLogger.quiet("[cache insight]: MakeHashChain is missing for: %s[%s]" % (self.MetaFile.Path, self.Arch))
        # NOTE(review): a 'return' line is elided here.
    # Find the cache dir name through the .ModuleHashPair file info
    FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
    if not os.path.exists(ModuleHashPair):
        EdkLogger.quiet("[cache insight]: Cannot find ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
        # NOTE(review): a 'return' line is elided here.
    # NOTE(review): a 'try:' line appears to be elided before the load below.
    f = open(ModuleHashPair, 'r')
    ModuleHashPairList = json.load(f)
    # NOTE(review): f.close() and the 'except:' line are elided before the
    # message below — confirm against the full source.
    EdkLogger.quiet("[cache insight]: Cannot load ModuleHashPair file for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
    # Collect every stored make-hash whose cache dir actually exists.
    # NOTE(review): MakeHashSet initialization is elided from this listing.
    for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
        TargetHashDir = path.join(FileDir, str(MakeHash))
        if os.path.exists(TargetHashDir):
            MakeHashSet.add(MakeHash)
    # NOTE(review): an 'if not MakeHashSet:' guard appears to be elided
    # before the message below — confirm against the full source.
    EdkLogger.quiet("[cache insight]: Cannot find valid cache dir for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
    # Pick one candidate cache dir (arbitrary when several exist).
    TargetHash = list(MakeHashSet)[0]
    TargetHashDir = path.join(FileDir, str(TargetHash))
    if len(MakeHashSet) > 1 :
        EdkLogger.quiet("[cache insight]: found multiple cache dirs for this module, random select dir '%s' to search the first cache miss file: %s[%s]" % (TargetHash, self.MetaFile.Path, self.Arch))
    # Load the cached MakeHashChain list for comparison.
    ListFile = path.join(TargetHashDir, self.Name + '.MakeHashChain')
    if os.path.exists(ListFile):
        # NOTE(review): a 'try:' line appears to be elided here.
        f = open(ListFile, 'r')
        CachedList = json.load(f)
        # NOTE(review): f.close() and the 'except:' line are elided before
        # the message below.
        EdkLogger.quiet("[cache insight]: Cannot load MakeHashChain file: %s" % ListFile)
        # NOTE(review): an 'else:' line is elided before the message below.
        EdkLogger.quiet("[cache insight]: Cannot find MakeHashChain file: %s" % ListFile)
    # Walk both chains in lockstep and report the first divergence.
    CurrentList = gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain
    for idx, (file, hash) in enumerate (CurrentList):
        (filecached, hashcached) = CachedList[idx]
        if file != filecached:
            EdkLogger.quiet("[cache insight]: first different file in %s[%s] is %s, the cached one is %s" % (self.MetaFile.Path, self.Arch, file, filecached))
            # NOTE(review): a 'break' line appears to be elided here.
        if hash != hashcached:
            EdkLogger.quiet("[cache insight]: first cache miss file in %s[%s] is %s" % (self.MetaFile.Path, self.Arch, file))
            # NOTE(review): a 'break' line appears to be elided here.
## Decide whether we can skip the ModuleAutoGen process
def CanSkipbyCache(self, gDict):
    """Return whether this module's AutoGen can be skipped, based on earlier
    pre-makefile / make cache-hit results recorded in gDict, memoizing the
    decision in GlobalData.gBuildHashSkipTracking.

    NOTE(review): every 'return' line is elided from this listing (gaps in
    the embedded numbering) — confirm against the full source.
    """
    # Hashing feature is off
    if not GlobalData.gBinCacheSource:
        # NOTE(review): body elided — presumably 'return False'.
    # Memoized decision from an earlier call.
    if self in GlobalData.gBuildHashSkipTracking:
        return GlobalData.gBuildHashSkipTracking[self]
    # If library or Module is binary do not skip by hash
    if self.IsBinaryModule:
        GlobalData.gBuildHashSkipTracking[self] = False
        # NOTE(review): a 'return' line is elided here.
    # .inc contains binary information so do not skip by hash as well
    for f_ext in self.SourceFileList:
        if '.inc' in str(f_ext):
            GlobalData.gBuildHashSkipTracking[self] = False
            # NOTE(review): a 'return' line is elided here.
    if not (self.MetaFile.Path, self.Arch) in gDict:
        # NOTE(review): body elided — presumably 'return False'.
    # Skip when either cache checkpoint recorded a hit for this module.
    if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:
        GlobalData.gBuildHashSkipTracking[self] = True
        # NOTE(review): a 'return' line is elided here.
    if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:
        GlobalData.gBuildHashSkipTracking[self] = True
        # NOTE(review): a 'return' line is elided here.
## Decide whether we can skip the ModuleAutoGen process
# If any source file is newer than the module then we cannot skip
#
# NOTE(review): the 'def' line of this method is elided from this listing
# (gap in the embedded numbering); only its body statements are visible.
# Presumably the timestamp-based CanSkip(self) — confirm in full source.
    # Don't skip if cache feature enabled
    if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
        # NOTE(review): body elided — presumably 'return False'.
    # Already decided for this makefile directory in an earlier call.
    if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
        # NOTE(review): body elided — presumably 'return True'.
    # No timestamp file means AutoGen has never completed — cannot skip.
    if not os.path.exists(self.TimeStampPath):
        # NOTE(review): body elided — presumably 'return False'.
    #last creation time of the module
    DstTimeStamp = os.stat(self.TimeStampPath)[8]
    SrcTimeStamp = self.Workspace._SrcTimeStamp
    if SrcTimeStamp > DstTimeStamp:
        # NOTE(review): body elided — presumably 'return False'.
    # Compare each recorded source path's mtime against the module stamp,
    # memoizing stat results in the class-level TimeDict.
    with open(self.TimeStampPath,'r') as f:
        # NOTE(review): a 'for source in f:' line appears to be elided here.
        source = source.rstrip('\n')
        if not os.path.exists(source):
            # NOTE(review): body elided — presumably 'return False'.
        if source not in ModuleAutoGen.TimeDict :
            ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
        if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
            # NOTE(review): body elided — presumably 'return False'.
    # All checks passed: remember this makefile dir as skippable.
    GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
    # NOTE(review): a final 'return True' line appears to be elided.
def TimeStampPath(self):
    """Full path of the 'AutoGenTimeStamp' marker file for this module.

    The file is kept directly inside the module's makefile directory and
    records when AutoGen last completed.
    """
    stamp_dir = self.MakeFileDir
    return os.path.join(stamp_dir, 'AutoGenTimeStamp')