diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py
index 205a75dc80..95e3e912b1 100644
--- a/BaseTools/Source/Python/AutoGen/AutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -1,7 +1,7 @@
 ## @file
 # Generate AutoGen.h, AutoGen.c and *.depex files
 #
-# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.
# This program and the accompanying materials # are licensed and made available under the terms and conditions of the BSD License # which accompanies this distribution. The full text of the license may be found at @@ -43,6 +43,8 @@ from Workspace.MetaFileCommentParser import UsageList from Common.MultipleWorkspace import MultipleWorkspace as mws import InfSectionParser import datetime +import hashlib +from GenVar import VariableMgr,var_info ## Regular expression for splitting Dependency Expression string into tokens gDepexTokenPattern = re.compile("(\(|\)|\w+| \S+\.inf)") @@ -143,6 +145,8 @@ ${END} ${depexsection_item} +${userextension_tianocore_item} + ${tail_comments} [BuildOptions.${module_arch}] @@ -263,6 +267,11 @@ class WorkspaceAutoGen(AutoGen): self.FvTargetList = Fvs self.CapTargetList = Caps self.AutoGenObjectList = [] + self._BuildDir = None + self._FvDir = None + self._MakeFileDir = None + self._BuildCommand = None + self._GuidDict = {} # there's many relative directory operations, so ... os.chdir(self.WorkspaceDir) @@ -389,71 +398,10 @@ class WorkspaceAutoGen(AutoGen): for Arch in self.ArchList: Platform = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain] - DecPcds = {} - DecPcdsKey = set() - PGen = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch) - if GlobalData.BuildOptionPcd: - for i, pcd in enumerate(GlobalData.BuildOptionPcd): - if type(pcd) is tuple: - continue - (pcdname, pcdvalue) = pcd.split('=') - if not pcdvalue: - EdkLogger.error('build', AUTOGEN_ERROR, "No Value specified for the PCD %s." % (pcdname)) - if '.' in pcdname: - (TokenSpaceGuidCName, TokenCName) = pcdname.split('.') - HasTokenSpace = True - else: - TokenCName = pcdname - TokenSpaceGuidCName = '' - HasTokenSpace = False - TokenSpaceGuidCNameList = [] - FoundFlag = False - PcdDatumType = '' - NewValue = '' - for package in PGen.PackageList: - for key in package.Pcds: - PcdItem = package.Pcds[key] - if HasTokenSpace: - if (PcdItem.TokenCName, PcdItem.TokenSpaceGuidCName) == (TokenCName, TokenSpaceGuidCName): - PcdDatumType = PcdItem.DatumType - NewValue = self._BuildOptionPcdValueFormat(TokenSpaceGuidCName, TokenCName, PcdDatumType, pcdvalue) - FoundFlag = True - else: - if PcdItem.TokenCName == TokenCName: - if not PcdItem.TokenSpaceGuidCName in TokenSpaceGuidCNameList: - if len (TokenSpaceGuidCNameList) < 1: - TokenSpaceGuidCNameList.append(PcdItem.TokenSpaceGuidCName) - PcdDatumType = PcdItem.DatumType - TokenSpaceGuidCName = PcdItem.TokenSpaceGuidCName - NewValue = self._BuildOptionPcdValueFormat(TokenSpaceGuidCName, TokenCName, PcdDatumType, pcdvalue) - FoundFlag = True - else: - EdkLogger.error( - 'build', - AUTOGEN_ERROR, - "The Pcd %s is found under multiple different TokenSpaceGuid: %s and %s." % (TokenCName, PcdItem.TokenSpaceGuidCName, TokenSpaceGuidCNameList[0]) - ) - GlobalData.BuildOptionPcd[i] = (TokenSpaceGuidCName, TokenCName, NewValue) - if not FoundFlag: - if HasTokenSpace: - EdkLogger.error('build', AUTOGEN_ERROR, "The Pcd %s.%s is not found in the DEC file." % (TokenSpaceGuidCName, TokenCName)) - else: - EdkLogger.error('build', AUTOGEN_ERROR, "The Pcd %s is not found in the DEC file." 
% (TokenCName)) - for BuildData in PGen.BuildDatabase._CACHE_.values(): - if BuildData.Arch != Arch: - continue - if BuildData.MetaFile.Ext == '.dec': - continue - for key in BuildData.Pcds: - PcdItem = BuildData.Pcds[key] - if (TokenSpaceGuidCName, TokenCName) == (PcdItem.TokenSpaceGuidCName, PcdItem.TokenCName): - PcdItem.DefaultValue = NewValue - if (TokenCName, TokenSpaceGuidCName) in PcdSet: - PcdSet[(TokenCName, TokenSpaceGuidCName)] = NewValue SourcePcdDict = {'DynamicEx':[], 'PatchableInModule':[],'Dynamic':[],'FixedAtBuild':[]} BinaryPcdDict = {'DynamicEx':[], 'PatchableInModule':[]} @@ -461,6 +409,7 @@ class WorkspaceAutoGen(AutoGen): BinaryPcdDict_Keys = BinaryPcdDict.keys() # generate the SourcePcdDict and BinaryPcdDict + PGen = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch) for BuildData in PGen.BuildDatabase._CACHE_.values(): if BuildData.Arch != Arch: continue @@ -603,6 +552,8 @@ class WorkspaceAutoGen(AutoGen): ModuleData = self.BuildDatabase[ModuleFile, Arch, Target, Toolchain] PkgSet.update(ModuleData.Packages) Pkgs = list(PkgSet) + list(PGen.PackageList) + DecPcds = {} + DecPcdsKey = set() for Pkg in Pkgs: for Pcd in Pkg.Pcds: DecPcds[Pcd[0], Pcd[1]] = Pkg.Pcds[Pcd] @@ -642,6 +593,14 @@ class WorkspaceAutoGen(AutoGen): Pa.CollectFixedAtBuildPcds() self.AutoGenObjectList.append(Pa) + # + # Generate Package level hash value + # + GlobalData.gPackageHash[Arch] = {} + if GlobalData.gUseHashCache: + for Pkg in Pkgs: + self._GenPkgLevelHash(Pkg) + # # Check PCDs token value conflict in each DEC file. # @@ -655,21 +614,37 @@ class WorkspaceAutoGen(AutoGen): # if self.FdfFile: # self._CheckDuplicateInFV(Fdf) - self._BuildDir = None - self._FvDir = None - self._MakeFileDir = None - self._BuildCommand = None - # - # Create BuildOptions Macro & PCD metafile. + # Create BuildOptions Macro & PCD metafile, also add the Active Platform and FDF file. # content = 'gCommandLineDefines: ' content += str(GlobalData.gCommandLineDefines) content += os.linesep content += 'BuildOptionPcd: ' content += str(GlobalData.BuildOptionPcd) + content += os.linesep + content += 'Active Platform: ' + content += str(self.Platform) + content += os.linesep + if self.FdfFile: + content += 'Flash Image Definition: ' + content += str(self.FdfFile) + content += os.linesep SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False) + # + # Create PcdToken Number file for Dynamic/DynamicEx Pcd. 
+ # + PcdTokenNumber = 'PcdTokenNumber: ' + if Pa.PcdTokenNumber: + if Pa.DynamicPcdList: + for Pcd in Pa.DynamicPcdList: + PcdTokenNumber += os.linesep + PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName)) + PcdTokenNumber += ' : ' + PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]) + SaveFileOnChange(os.path.join(self.BuildDir, 'PcdTokenNumber'), PcdTokenNumber, False) + # # Get set of workspace metafiles # @@ -684,6 +659,18 @@ class WorkspaceAutoGen(AutoGen): SrcTimeStamp = os.stat(f)[8] self._SrcTimeStamp = SrcTimeStamp + if GlobalData.gUseHashCache: + m = hashlib.md5() + for files in AllWorkSpaceMetaFiles: + if files.endswith('.dec'): + continue + f = open(files, 'r') + Content = f.read() + f.close() + m.update(Content) + SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), True) + GlobalData.gPlatformHash = m.hexdigest() + # # Write metafile list to build directory # @@ -697,31 +684,29 @@ class WorkspaceAutoGen(AutoGen): print >> file, f return True - def _BuildOptionPcdValueFormat(self, TokenSpaceGuidCName, TokenCName, PcdDatumType, Value): - if PcdDatumType == 'VOID*': - if Value.startswith('L'): - if not Value[1]: - EdkLogger.error('build', OPTION_VALUE_INVALID, 'For Void* type PCD, when specify the Value in the command line, please use the following format: "string", L"string", B"{...}"') - Value = Value[0] + '"' + Value[1:] + '"' - elif Value.startswith('B'): - if not Value[1]: - EdkLogger.error('build', OPTION_VALUE_INVALID, 'For Void* type PCD, when specify the Value in the command line, please use the following format: "string", L"string", B"{...}"') - Value = Value[1:] - else: - if not Value[0]: - EdkLogger.error('build', OPTION_VALUE_INVALID, 'For Void* type PCD, when specify the Value in the command line, please use the following format: "string", L"string", B"{...}"') - Value = '"' + Value + '"' - - IsValid, Cause = CheckPcdDatum(PcdDatumType, Value) - if not IsValid: - EdkLogger.error('build', FORMAT_INVALID, Cause, ExtraData="%s.%s" % (TokenSpaceGuidCName, TokenCName)) - if PcdDatumType == 'BOOLEAN': - Value = Value.upper() - if Value == 'TRUE' or Value == '1': - Value = '1' - elif Value == 'FALSE' or Value == '0': - Value = '0' - return Value + def _GenPkgLevelHash(self, Pkg): + PkgDir = os.path.join(self.BuildDir, Pkg.Arch, Pkg.PackageName) + CreateDirectory(PkgDir) + HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash') + m = hashlib.md5() + # Get .dec file's hash value + f = open(Pkg.MetaFile.Path, 'r') + Content = f.read() + f.close() + m.update(Content) + # Get include files hash value + if Pkg.Includes: + for inc in Pkg.Includes: + for Root, Dirs, Files in os.walk(str(inc)): + for File in Files: + File_Path = os.path.join(Root, File) + f = open(File_Path, 'r') + Content = f.read() + f.close() + m.update(Content) + SaveFileOnChange(HashFile, m.hexdigest(), True) + if Pkg.PackageName not in GlobalData.gPackageHash[Pkg.Arch]: + GlobalData.gPackageHash[Pkg.Arch][Pkg.PackageName] = m.hexdigest() def _GetMetaFiles(self, Target, Toolchain, Arch): AllWorkSpaceMetaFiles = set() @@ -749,6 +734,10 @@ class WorkspaceAutoGen(AutoGen): # AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'BuildOptions')) + # add PcdToken Number file for Dynamic/DynamicEx Pcd + # + AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'PcdTokenNumber')) + for Arch in self.ArchList: Platform = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain] PGen = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch) 
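Note: the _GenPkgLevelHash and workspace-hash hunks above implement the hash cache introduced by this change: an MD5 digest is taken over each package's DEC file plus every file under its include directories, and a platform-level digest over the non-DEC workspace metafiles, so stale cached binaries can be detected later. A minimal standalone sketch of the package-level idea, with a hypothetical helper name and example paths (not the BaseTools API):

import hashlib
import os

def gen_package_hash(dec_path, include_dirs):
    # Digest the package DEC file first, then every file found under its
    # include paths (simplified version of the per-package hashing above).
    m = hashlib.md5()
    with open(dec_path, 'rb') as f:
        m.update(f.read())
    for inc in include_dirs:
        for root, _dirs, files in os.walk(inc):
            for name in sorted(files):  # sorted only to make this sketch reproducible
                with open(os.path.join(root, name), 'rb') as f:
                    m.update(f.read())
    return m.hexdigest()

# Example usage with hypothetical paths:
# print(gen_package_hash('MdePkg/MdePkg.dec', ['MdePkg/Include']))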
@@ -955,7 +944,8 @@ class WorkspaceAutoGen(AutoGen): ## Return the directory to store all intermediate and final files built def _GetBuildDir(self): - return self.AutoGenObjectList[0].BuildDir + if self._BuildDir == None: + return self.AutoGenObjectList[0].BuildDir ## Return the build output directory platform specifies def _GetOutputDir(self): @@ -1178,6 +1168,8 @@ class PlatformAutoGen(AutoGen): self.AllPcdList = [] # get the original module/package/platform objects self.BuildDatabase = Workspace.BuildDatabase + self.DscBuildDataObj = Workspace.Platform + self._GuidDict = Workspace._GuidDict # flag indicating if the makefile/C-code file has been created or not self.IsMakeFileCreated = False @@ -1214,6 +1206,9 @@ class PlatformAutoGen(AutoGen): self._BuildCommand = None self._AsBuildInfList = [] self._AsBuildModuleList = [] + + self.VariableInfo = None + if GlobalData.gFdfParser != None: self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList for Inf in self._AsBuildInfList: @@ -1225,6 +1220,7 @@ class PlatformAutoGen(AutoGen): # get library/modules for build self.LibraryBuildDirectoryList = [] self.ModuleBuildDirectoryList = [] + return True def __repr__(self): @@ -1258,12 +1254,15 @@ class PlatformAutoGen(AutoGen): # @param CreateModuleMakeFile Flag indicating if the makefile for # modules will be created as well # - def CreateMakeFile(self, CreateModuleMakeFile=False): + def CreateMakeFile(self, CreateModuleMakeFile=False, FfsCommand = {}): if CreateModuleMakeFile: for ModuleFile in self.Platform.Modules: Ma = ModuleAutoGen(self.Workspace, ModuleFile, self.BuildTarget, self.ToolChain, self.Arch, self.MetaFile) - Ma.CreateMakeFile(True) + if (ModuleFile.File, self.Arch) in FfsCommand: + Ma.CreateMakeFile(True, FfsCommand[ModuleFile.File, self.Arch]) + else: + Ma.CreateMakeFile(True) #Ma.CreateAsBuiltInf() # no need to create makefile for the platform more than once @@ -1303,23 +1302,72 @@ class PlatformAutoGen(AutoGen): if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]: LibAuto.ConstPcd[key] = Pcd.DefaultValue + def CollectVariables(self, DynamicPcdSet): + + VpdRegionSize = 0 + VpdRegionBase = 0 + if self.Workspace.FdfFile: + FdDict = self.Workspace.FdfProfile.FdDict[GlobalData.gFdfParser.CurrentFdName] + for FdRegion in FdDict.RegionList: + for item in FdRegion.RegionDataList: + if self.Platform.VpdToolGuid.strip() and self.Platform.VpdToolGuid in item: + VpdRegionSize = FdRegion.Size + VpdRegionBase = FdRegion.Offset + break + + + VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(),self.DscBuildDataObj._GetSkuIds()) + VariableInfo.SetVpdRegionMaxSize(VpdRegionSize) + VariableInfo.SetVpdRegionOffset(VpdRegionBase) + Index = 0 + for Pcd in DynamicPcdSet: + pcdname = ".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName)) + for SkuName in Pcd.SkuInfoList: + Sku = Pcd.SkuInfoList[SkuName] + SkuId = Sku.SkuId + if SkuId == None or SkuId == '': + continue + if len(Sku.VariableName) > 0: + VariableGuidStructure = Sku.VariableGuidValue + VariableGuid = GuidStructureStringToGuidString(VariableGuidStructure) + for StorageName in Sku.DefaultStoreDict: + VariableInfo.append_variable(var_info(Index,pcdname,StorageName,SkuName, StringToArray(Sku.VariableName),VariableGuid, Sku.VariableOffset, Sku.VariableAttribute , Sku.HiiDefaultValue,Sku.DefaultStoreDict[StorageName],Pcd.DatumType)) + Index += 1 + return VariableInfo + + def UpdateNVStoreMaxSize(self,OrgVpdFile): + if self.VariableInfo: + VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % 
self.Platform.VpdToolGuid) + PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"] + + if PcdNvStoreDfBuffer: + if os.path.exists(VpdMapFilePath): + OrgVpdFile.Read(VpdMapFilePath) + PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0]) + NvStoreOffset = PcdItems.values()[0].strip() if PcdItems else '0' + else: + EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath) + + NvStoreOffset = int(NvStoreOffset,16) if NvStoreOffset.upper().startswith("0X") else int(NvStoreOffset) + default_skuobj = PcdNvStoreDfBuffer[0].SkuInfoList.get("DEFAULT") + maxsize = self.VariableInfo.VpdRegionSize - NvStoreOffset if self.VariableInfo.VpdRegionSize else len(default_skuobj.DefaultValue.split(",")) + var_data = self.VariableInfo.PatchNVStoreDefaultMaxSize(maxsize) + + if var_data and default_skuobj: + default_skuobj.DefaultValue = var_data + PcdNvStoreDfBuffer[0].DefaultValue = var_data + PcdNvStoreDfBuffer[0].SkuInfoList.clear() + PcdNvStoreDfBuffer[0].SkuInfoList['DEFAULT'] = default_skuobj + PcdNvStoreDfBuffer[0].MaxDatumSize = str(len(default_skuobj.DefaultValue.split(","))) + + return OrgVpdFile + ## Collect dynamic PCDs # # Gather dynamic PCDs list from each module and their settings from platform # This interface should be invoked explicitly when platform action is created. # def CollectPlatformDynamicPcds(self): - # Override the platform Pcd's value by build option - if GlobalData.BuildOptionPcd: - for key in self.Platform.Pcds: - PlatformPcd = self.Platform.Pcds[key] - for PcdItem in GlobalData.BuildOptionPcd: - if (PlatformPcd.TokenSpaceGuidCName, PlatformPcd.TokenCName) == (PcdItem[0], PcdItem[1]): - PlatformPcd.DefaultValue = PcdItem[2] - if PlatformPcd.SkuInfoList: - Sku = PlatformPcd.SkuInfoList[PlatformPcd.SkuInfoList.keys()[0]] - Sku.DefaultValue = PcdItem[2] - break for key in self.Platform.Pcds: for SinglePcd in GlobalData.MixedPcd: @@ -1497,9 +1545,9 @@ class PlatformAutoGen(AutoGen): # # The reason of sorting is make sure the unicode string is in double-byte alignment in string table. 
# - UnicodePcdArray = [] - HiiPcdArray = [] - OtherPcdArray = [] + UnicodePcdArray = set() + HiiPcdArray = set() + OtherPcdArray = set() VpdPcdDict = {} VpdFile = VpdInfoFile.VpdInfoFile() NeedProcessVpdMapFile = False @@ -1508,34 +1556,55 @@ class PlatformAutoGen(AutoGen): if pcd not in self._PlatformPcds.keys(): self._PlatformPcds[pcd] = self.Platform.Pcds[pcd] + for item in self._PlatformPcds: + if self._PlatformPcds[item].DatumType and self._PlatformPcds[item].DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]: + self._PlatformPcds[item].DatumType = "VOID*" + if (self.Workspace.ArchList[-1] == self.Arch): for Pcd in self._DynamicPcdList: # just pick the a value to determine whether is unicode string type Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]] Sku.VpdOffset = Sku.VpdOffset.strip() - PcdValue = Sku.DefaultValue - if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"): + if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]: + Pcd.DatumType = "VOID*" + # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex - UnicodePcdArray.append(Pcd) - elif len(Sku.VariableName) > 0: # if found HII type PCD then insert to right of UnicodeIndex - HiiPcdArray.append(Pcd) - else: - OtherPcdArray.append(Pcd) if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]: VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd + #Collect DynamicHii PCD values and assign it to DynamicExVpd PCD gEfiMdeModulePkgTokenSpaceGuid.PcdNvStoreDefaultValueBuffer + PcdNvStoreDfBuffer = VpdPcdDict.get(("PcdNvStoreDefaultValueBuffer","gEfiMdeModulePkgTokenSpaceGuid")) + if PcdNvStoreDfBuffer: + self.VariableInfo = self.CollectVariables(self._DynamicPcdList) + vardump = self.VariableInfo.dump() + if vardump: + PcdNvStoreDfBuffer.DefaultValue = vardump + for skuname in PcdNvStoreDfBuffer.SkuInfoList: + PcdNvStoreDfBuffer.SkuInfoList[skuname].DefaultValue = vardump + PcdNvStoreDfBuffer.MaxDatumSize = str(len(vardump.split(","))) + PlatformPcds = self._PlatformPcds.keys() PlatformPcds.sort() # # Add VPD type PCD into VpdFile and determine whether the VPD PCD need to be fixed up. # + VpdSkuMap = {} for PcdKey in PlatformPcds: Pcd = self._PlatformPcds[PcdKey] if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD] and \ PcdKey in VpdPcdDict: Pcd = VpdPcdDict[PcdKey] + SkuValueMap = {} + DefaultSku = Pcd.SkuInfoList.get('DEFAULT') + if DefaultSku: + PcdValue = DefaultSku.DefaultValue + if PcdValue not in SkuValueMap: + SkuValueMap[PcdValue] = [] + VpdFile.Add(Pcd, 'DEFAULT',DefaultSku.VpdOffset) + SkuValueMap[PcdValue].append(DefaultSku) + for (SkuName,Sku) in Pcd.SkuInfoList.items(): Sku.VpdOffset = Sku.VpdOffset.strip() PcdValue = Sku.DefaultValue @@ -1560,7 +1629,10 @@ class PlatformAutoGen(AutoGen): EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName), File=self.MetaFile) else: EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Alignment)) - VpdFile.Add(Pcd, Sku.VpdOffset) + if PcdValue not in SkuValueMap: + SkuValueMap[PcdValue] = [] + VpdFile.Add(Pcd, SkuName,Sku.VpdOffset) + SkuValueMap[PcdValue].append(Sku) # if the offset of a VPD is *, then it need to be fixed up by third party tool. 
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*": NeedProcessVpdMapFile = True @@ -1568,7 +1640,7 @@ class PlatformAutoGen(AutoGen): EdkLogger.error("Build", FILE_NOT_FOUND, \ "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.") - + VpdSkuMap[PcdKey] = SkuValueMap # # Fix the PCDs define in VPD PCD section that never referenced by module. # An example is PCD for signature usage. @@ -1587,7 +1659,13 @@ class PlatformAutoGen(AutoGen): # Not found, it should be signature if not FoundFlag : # just pick the a value to determine whether is unicode string type - for (SkuName,Sku) in DscPcdEntry.SkuInfoList.items(): + SkuValueMap = {} + SkuObjList = DscPcdEntry.SkuInfoList.items() + DefaultSku = DscPcdEntry.SkuInfoList.get('DEFAULT') + if DefaultSku: + defaultindex = SkuObjList.index(('DEFAULT',DefaultSku)) + SkuObjList[0],SkuObjList[defaultindex] = SkuObjList[defaultindex],SkuObjList[0] + for (SkuName,Sku) in SkuObjList: Sku.VpdOffset = Sku.VpdOffset.strip() # Need to iterate DEC pcd information to get the value & datumtype @@ -1612,7 +1690,6 @@ class PlatformAutoGen(AutoGen): if DscPcdEntry not in self._DynamicPcdList: self._DynamicPcdList.append(DscPcdEntry) -# Sku = DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]] Sku.VpdOffset = Sku.VpdOffset.strip() PcdValue = Sku.DefaultValue if PcdValue == "": @@ -1636,49 +1713,31 @@ class PlatformAutoGen(AutoGen): EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName), File=self.MetaFile) else: EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, Alignment)) - VpdFile.Add(DscPcdEntry, Sku.VpdOffset) + if PcdValue not in SkuValueMap: + SkuValueMap[PcdValue] = [] + VpdFile.Add(DscPcdEntry, SkuName,Sku.VpdOffset) + SkuValueMap[PcdValue].append(Sku) if not NeedProcessVpdMapFile and Sku.VpdOffset == "*": NeedProcessVpdMapFile = True if DscPcdEntry.DatumType == 'VOID*' and PcdValue.startswith("L"): - UnicodePcdArray.append(DscPcdEntry) + UnicodePcdArray.add(DscPcdEntry) elif len(Sku.VariableName) > 0: - HiiPcdArray.append(DscPcdEntry) + HiiPcdArray.add(DscPcdEntry) else: - OtherPcdArray.append(DscPcdEntry) - + OtherPcdArray.add(DscPcdEntry) + # if the offset of a VPD is *, then it need to be fixed up by third party tool. - - - + VpdSkuMap[DscPcd] = SkuValueMap if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \ VpdFile.GetCount() != 0: EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile)) if VpdFile.GetCount() != 0: - FvPath = os.path.join(self.BuildDir, "FV") - if not os.path.exists(FvPath): - try: - os.makedirs(FvPath) - except: - EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir) - - VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid) - - if VpdFile.Write(VpdFilePath): - # retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file. 
- BPDGToolName = None - for ToolDef in self.ToolDefinition.values(): - if ToolDef.has_key("GUID") and ToolDef["GUID"] == self.Platform.VpdToolGuid: - if not ToolDef.has_key("PATH"): - EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid) - BPDGToolName = ToolDef["PATH"] - break - # Call third party GUID BPDG tool. - if BPDGToolName != None: - VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath) - else: - EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.") + + self.FixVpdOffset(VpdFile) + + self.FixVpdOffset(self.UpdateNVStoreMaxSize(VpdFile)) # Process VPD map file generated by third party BPDG tool if NeedProcessVpdMapFile: @@ -1687,23 +1746,76 @@ class PlatformAutoGen(AutoGen): VpdFile.Read(VpdMapFilePath) # Fixup "*" offset - for Pcd in self._DynamicPcdList: + for pcd in VpdSkuMap: + vpdinfo = VpdFile.GetVpdInfo(pcd) + if vpdinfo is None: # just pick the a value to determine whether is unicode string type - i = 0 - for (SkuName,Sku) in Pcd.SkuInfoList.items(): - if Sku.VpdOffset == "*": - Sku.VpdOffset = VpdFile.GetOffset(Pcd)[i].strip() - i += 1 + continue + for pcdvalue in VpdSkuMap[pcd]: + for sku in VpdSkuMap[pcd][pcdvalue]: + for item in vpdinfo: + if item[2] == pcdvalue: + sku.VpdOffset = item[1] else: EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath) - # Delete the DynamicPcdList At the last time enter into this function + # Delete the DynamicPcdList At the last time enter into this function + for Pcd in self._DynamicPcdList: + # just pick the a value to determine whether is unicode string type + Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]] + Sku.VpdOffset = Sku.VpdOffset.strip() + + if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]: + Pcd.DatumType = "VOID*" + + PcdValue = Sku.DefaultValue + if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"): + # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex + UnicodePcdArray.add(Pcd) + elif len(Sku.VariableName) > 0: + # if found HII type PCD then insert to right of UnicodeIndex + HiiPcdArray.add(Pcd) + else: + OtherPcdArray.add(Pcd) del self._DynamicPcdList[:] - self._DynamicPcdList.extend(UnicodePcdArray) - self._DynamicPcdList.extend(HiiPcdArray) - self._DynamicPcdList.extend(OtherPcdArray) + self._DynamicPcdList.extend(list(UnicodePcdArray)) + self._DynamicPcdList.extend(list(HiiPcdArray)) + self._DynamicPcdList.extend(list(OtherPcdArray)) + allskuset = [(SkuName,Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName,Sku) in pcd.SkuInfoList.items()] + for pcd in self._DynamicPcdList: + if len(pcd.SkuInfoList) == 1: + for (SkuName,SkuId) in allskuset: + if type(SkuId) in (str,unicode) and eval(SkuId) == 0 or SkuId == 0: + continue + pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList['DEFAULT']) + pcd.SkuInfoList[SkuName].SkuId = SkuId self.AllPcdList = self._NonDynamicPcdList + self._DynamicPcdList - + + def FixVpdOffset(self,VpdFile ): + FvPath = os.path.join(self.BuildDir, "FV") + if not os.path.exists(FvPath): + try: + os.makedirs(FvPath) + except: + EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir) + + VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid) + 
+ if VpdFile.Write(VpdFilePath): + # retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file. + BPDGToolName = None + for ToolDef in self.ToolDefinition.values(): + if ToolDef.has_key("GUID") and ToolDef["GUID"] == self.Platform.VpdToolGuid: + if not ToolDef.has_key("PATH"): + EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid) + BPDGToolName = ToolDef["PATH"] + break + # Call third party GUID BPDG tool. + if BPDGToolName != None: + VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath) + else: + EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.") + ## Return the platform build data object def _GetPlatform(self): if self._Platform == None: @@ -1749,6 +1861,7 @@ class PlatformAutoGen(AutoGen): self.OutputDir, self.BuildTarget + "_" + self.ToolChain, ) + GlobalData.gBuildDirectory = self._BuildDir return self._BuildDir ## Return directory of platform makefile @@ -1773,6 +1886,13 @@ class PlatformAutoGen(AutoGen): NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip() if NewOption != '': self._BuildCommand += SplitOption(NewOption) + if "MAKE" in self.EdkIIBuildOption: + if "FLAGS" in self.EdkIIBuildOption["MAKE"]: + Flags = self.EdkIIBuildOption["MAKE"]["FLAGS"] + if Flags.startswith('='): + self._BuildCommand = [self._BuildCommand[0]] + [Flags[1:]] + else: + self._BuildCommand += [Flags] return self._BuildCommand ## Get tool chain definition @@ -2232,11 +2352,6 @@ class PlatformAutoGen(AutoGen): TokenCName = PcdItem[0] break if FromPcd != None: - if GlobalData.BuildOptionPcd: - for pcd in GlobalData.BuildOptionPcd: - if (FromPcd.TokenSpaceGuidCName, FromPcd.TokenCName) == (pcd[0], pcd[1]): - FromPcd.DefaultValue = pcd[2] - break if ToPcd.Pending and FromPcd.Type not in [None, '']: ToPcd.Type = FromPcd.Type elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\ @@ -2263,6 +2378,13 @@ class PlatformAutoGen(AutoGen): ToPcd.DatumType = FromPcd.DatumType if FromPcd.SkuInfoList not in [None, '', []]: ToPcd.SkuInfoList = FromPcd.SkuInfoList + # Add Flexible PCD format parse + if ToPcd.DefaultValue: + try: + ToPcd.DefaultValue = ValueExpressionEx(ToPcd.DefaultValue, ToPcd.DatumType, self._GuidDict)(True) + except BadExpression, Value: + EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, ToPcd.DefaultValue, Value), + File=self.MetaFile) # check the validation of datum IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue) @@ -2273,7 +2395,7 @@ class PlatformAutoGen(AutoGen): ToPcd.validlists = FromPcd.validlists ToPcd.expressions = FromPcd.expressions - if ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]: + if FromPcd != None and ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]: EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \ % (ToPcd.TokenSpaceGuidCName, TokenCName)) Value = ToPcd.DefaultValue @@ -2294,7 +2416,7 @@ class PlatformAutoGen(AutoGen): else: SkuName = 'DEFAULT' ToPcd.SkuInfoList = { - SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName], '', '', '', '', '', ToPcd.DefaultValue) + SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue) } ## Apply PCD setting defined platform to a module @@ 
-2334,8 +2456,31 @@ class PlatformAutoGen(AutoGen): if Module in self.Platform.Modules: PlatformModule = self.Platform.Modules[str(Module)] for Key in PlatformModule.Pcds: + Flag = False if Key in Pcds: - self._OverridePcd(Pcds[Key], PlatformModule.Pcds[Key], Module) + ToPcd = Pcds[Key] + Flag = True + elif Key in GlobalData.MixedPcd: + for PcdItem in GlobalData.MixedPcd[Key]: + if PcdItem in Pcds: + ToPcd = Pcds[PcdItem] + Flag = True + break + if Flag: + self._OverridePcd(ToPcd, PlatformModule.Pcds[Key], Module) + # use PCD value to calculate the MaxDatumSize when it is not specified + for Name, Guid in Pcds: + Pcd = Pcds[Name, Guid] + if Pcd.DatumType == "VOID*" and Pcd.MaxDatumSize in ['', None]: + Value = Pcd.DefaultValue + if Value in [None, '']: + Pcd.MaxDatumSize = '1' + elif Value[0] == 'L': + Pcd.MaxDatumSize = str((len(Value) - 2) * 2) + elif Value[0] == '{': + Pcd.MaxDatumSize = str(len(Value.split(','))) + else: + Pcd.MaxDatumSize = str(len(Value) - 1) return Pcds.values() ## Resolve library names to library modules @@ -2684,6 +2829,7 @@ class ModuleAutoGen(AutoGen): self.BuildDatabase = self.Workspace.BuildDatabase self.BuildRuleOrder = None + self.BuildTime = 0 self._Module = None self._Name = None @@ -2699,6 +2845,7 @@ class ModuleAutoGen(AutoGen): self._BuildDir = None self._OutputDir = None + self._FfsOutputDir = None self._DebugDir = None self._MakeFileDir = None @@ -2759,10 +2906,7 @@ class ModuleAutoGen(AutoGen): if self._FixedAtBuildPcds: return self._FixedAtBuildPcds for Pcd in self.ModulePcdList: - if self.IsLibrary: - if not (Pcd.Pending == False and Pcd.Type == "FixedAtBuild"): - continue - elif Pcd.Type != "FixedAtBuild": + if Pcd.Type != "FixedAtBuild": continue if Pcd not in self._FixedAtBuildPcds: self._FixedAtBuildPcds.append(Pcd) @@ -2818,6 +2962,7 @@ class ModuleAutoGen(AutoGen): self._Macro["PLATFORM_RELATIVE_DIR" ] = self.PlatformInfo.SourceDir self._Macro["PLATFORM_DIR" ] = mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir) self._Macro["PLATFORM_OUTPUT_DIR" ] = self.PlatformInfo.OutputDir + self._Macro["FFS_OUTPUT_DIR" ] = self.FfsOutputDir return self._Macro ## Return the module build data object @@ -2908,6 +3053,15 @@ class ModuleAutoGen(AutoGen): CreateDirectory(self._OutputDir) return self._OutputDir + ## Return the directory to store ffs file + def _GetFfsOutputDir(self): + if self._FfsOutputDir == None: + if GlobalData.gFdfParser != None: + self._FfsOutputDir = path.join(self.PlatformInfo.BuildDir, "FV", "Ffs", self.Guid + self.Name) + else: + self._FfsOutputDir = '' + return self._FfsOutputDir + ## Return the directory to store auto-gened source files of the mdoule def _GetDebugDir(self): if self._DebugDir == None: @@ -3092,6 +3246,30 @@ class ModuleAutoGen(AutoGen): self._DepexExpressionList[ModuleType] = DepexExpressionList return self._DepexExpressionList + # Get the tiano core user extension, it is contain dependent library. + # @retval: a list contain tiano core userextension. 
+ # + def _GetTianoCoreUserExtensionList(self): + TianoCoreUserExtentionList = [] + for M in [self.Module] + self.DependentLibraryList: + Filename = M.MetaFile.Path + InfObj = InfSectionParser.InfSectionParser(Filename) + TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore() + for TianoCoreUserExtent in TianoCoreUserExtenList: + for Section in TianoCoreUserExtent.keys(): + ItemList = Section.split(TAB_SPLIT) + Arch = self.Arch + if len(ItemList) == 4: + Arch = ItemList[3] + if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper(): + TianoCoreList = [] + TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END]) + TianoCoreList.extend(TianoCoreUserExtent[Section][:]) + TianoCoreList.append('\n') + TianoCoreUserExtentionList.append(TianoCoreList) + + return TianoCoreUserExtentionList + ## Return the list of specification version required for the module # # @retval list The list of specification defined in module file @@ -3185,13 +3363,13 @@ class ModuleAutoGen(AutoGen): EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, " "but [%s] is needed" % (F.TagName, str(F), self.ToolChain)) continue - # match tool chain family - if F.ToolChainFamily not in ("", "*", self.ToolChainFamily): + # match tool chain family or build rule family + if F.ToolChainFamily not in ("", "*", self.ToolChainFamily, self.BuildRuleFamily): EdkLogger.debug( EdkLogger.DEBUG_0, "The file [%s] must be built by tools of [%s], " \ - "but current toolchain family is [%s]" \ - % (str(F), F.ToolChainFamily, self.ToolChainFamily)) + "but current toolchain family is [%s], buildrule family is [%s]" \ + % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily)) continue # add the file path into search path list for file including @@ -3745,7 +3923,13 @@ class ModuleAutoGen(AutoGen): ## Create AsBuilt INF file the module # - def CreateAsBuiltInf(self): + def CreateAsBuiltInf(self, IsOnlyCopy = False): + self.OutputFile = [] + if IsOnlyCopy: + if GlobalData.gBinCacheDest: + self.CopyModuleToCache() + return + if self.IsAsBuiltInfCreated: return @@ -3859,6 +4043,11 @@ class ModuleAutoGen(AutoGen): 'libraryclasses_item' : [] } + if 'MODULE_UNI_FILE' in MDefs: + UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE']) + if os.path.isfile(UNIFile): + shutil.copy2(UNIFile, self.OutputDir) + if self.AutoGenVersion > int(gInfSpecVersion, 0): AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion else: @@ -3873,9 +4062,13 @@ class ModuleAutoGen(AutoGen): AsBuiltInfDict['module_pi_specification_version'] += [self.Specification['PI_SPECIFICATION_VERSION']] OutputDir = self.OutputDir.replace('\\', '/').strip('/') - + DebugDir = self.DebugDir.replace('\\', '/').strip('/') for Item in self.CodaTargetList: - File = Item.Target.Path.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/') + File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/') + if File not in self.OutputFile: + self.OutputFile.append(File) + if os.path.isabs(File): + File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/') if Item.Target.Ext.lower() == '.aml': AsBuiltInfDict['binary_item'] += ['ASL|' + File] elif Item.Target.Ext.lower() == '.acpi': @@ -3885,6 +4078,8 @@ class ModuleAutoGen(AutoGen): else: AsBuiltInfDict['binary_item'] += ['BIN|' + File] if self.DepexGenerated: + if self.Name + '.depex' not in self.OutputFile: + self.OutputFile.append(self.Name + '.depex') if self.ModuleType 
in ['PEIM']: AsBuiltInfDict['binary_item'] += ['PEI_DEPEX|' + self.Name + '.depex'] if self.ModuleType in ['DXE_DRIVER', 'DXE_RUNTIME_DRIVER', 'DXE_SAL_DRIVER', 'UEFI_DRIVER']: @@ -3895,11 +4090,15 @@ class ModuleAutoGen(AutoGen): Bin = self._GenOffsetBin() if Bin: AsBuiltInfDict['binary_item'] += ['BIN|%s' % Bin] + if Bin not in self.OutputFile: + self.OutputFile.append(Bin) for Root, Dirs, Files in os.walk(OutputDir): for File in Files: if File.lower().endswith('.pdb'): AsBuiltInfDict['binary_item'] += ['DISPOSABLE|' + File] + if File not in self.OutputFile: + self.OutputFile.append(File) HeaderComments = self.Module.HeaderComments StartPos = 0 for Index in range(len(HeaderComments)): @@ -3948,7 +4147,7 @@ class ModuleAutoGen(AutoGen): elif BoolValue == 'FALSE': Pcd.DefaultValue = '0' - if Pcd.DatumType != 'VOID*': + if Pcd.DatumType in ['UINT8', 'UINT16', 'UINT32', 'UINT64', 'BOOLEAN']: HexFormat = '0x%02x' if Pcd.DatumType == 'UINT16': HexFormat = '0x%04x' @@ -4058,6 +4257,16 @@ class ModuleAutoGen(AutoGen): for Library in self.LibraryAutoGenList: AsBuiltInfDict['libraryclasses_item'] += [Library.MetaFile.File.replace('\\', '/')] + # Generated UserExtensions TianoCore section. + # All tianocore user extensions are copied. + UserExtStr = '' + for TianoCore in self._GetTianoCoreUserExtensionList(): + UserExtStr += '\n'.join(TianoCore) + ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1]) + if os.path.isfile(ExtensionFile): + shutil.copy2(ExtensionFile, self.OutputDir) + AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr + # Generated depex expression section in comments. AsBuiltInfDict['depexsection_item'] = '' DepexExpresion = self._GetDepexExpresionString() @@ -4070,26 +4279,72 @@ class ModuleAutoGen(AutoGen): SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False) self.IsAsBuiltInfCreated = True - + if GlobalData.gBinCacheDest: + self.CopyModuleToCache() + + def CopyModuleToCache(self): + FileDir = path.join(GlobalData.gBinCacheDest, self.Arch, self.SourceDir, self.MetaFile.BaseName) + CreateDirectory (FileDir) + HashFile = path.join(self.BuildDir, self.Name + '.hash') + ModuleFile = path.join(self.OutputDir, self.Name + '.inf') + if os.path.exists(HashFile): + shutil.copy2(HashFile, FileDir) + if os.path.exists(ModuleFile): + shutil.copy2(ModuleFile, FileDir) + if not self.OutputFile: + Ma = self.Workspace.BuildDatabase[PathClass(ModuleFile), self.Arch, self.BuildTarget, self.ToolChain] + self.OutputFile = Ma.Binaries + if self.OutputFile: + for File in self.OutputFile: + File = str(File) + if not os.path.isabs(File): + File = os.path.join(self.OutputDir, File) + if os.path.exists(File): + shutil.copy2(File, FileDir) + + def AttemptModuleCacheCopy(self): + if self.IsBinaryModule: + return False + FileDir = path.join(GlobalData.gBinCacheSource, self.Arch, self.SourceDir, self.MetaFile.BaseName) + HashFile = path.join(FileDir, self.Name + '.hash') + if os.path.exists(HashFile): + f = open(HashFile, 'r') + CacheHash = f.read() + f.close() + if GlobalData.gModuleHash[self.Arch][self.Name]: + if CacheHash == GlobalData.gModuleHash[self.Arch][self.Name]: + for root, dir, files in os.walk(FileDir): + for f in files: + if self.Name + '.hash' in f: + shutil.copy2(HashFile, self.BuildDir) + else: + File = path.join(root, f) + shutil.copy2(File, self.OutputDir) + if self.Name == "PcdPeim" or self.Name == "PcdDxe": + CreatePcdDatabaseCode(self, TemplateString(), TemplateString()) + return True + return False + ## Create makefile for the 
module and its dependent libraries # # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of # dependent libraries will be created # - def CreateMakeFile(self, CreateLibraryMakeFile=True): + def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []): # Ignore generating makefile when it is a binary module if self.IsBinaryModule: return if self.IsMakeFileCreated: return - if self.CanSkip(): - return - + self.GenFfsList = GenFfsList if not self.IsLibrary and CreateLibraryMakeFile: for LibraryAutoGen in self.LibraryAutoGenList: LibraryAutoGen.CreateMakeFile() + if self.CanSkip(): + return + if len(self.CustomMakefile) == 0: Makefile = GenMake.ModuleMakefile(self) else: @@ -4117,8 +4372,6 @@ class ModuleAutoGen(AutoGen): def CreateCodeFile(self, CreateLibraryCodeFile=True): if self.IsCodeFileCreated: return - if self.CanSkip(): - return # Need to generate PcdDatabase even PcdDriver is binarymodule if self.IsBinaryModule and self.PcdIsDriver != '': @@ -4133,6 +4386,9 @@ class ModuleAutoGen(AutoGen): for LibraryAutoGen in self.LibraryAutoGenList: LibraryAutoGen.CreateCodeFile() + if self.CanSkip(): + return + AutoGenList = [] IgoredAutoGenList = [] @@ -4198,8 +4454,54 @@ class ModuleAutoGen(AutoGen): self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE) return self._LibraryAutoGenList + def GenModuleHash(self): + if self.Arch not in GlobalData.gModuleHash: + GlobalData.gModuleHash[self.Arch] = {} + m = hashlib.md5() + # Add Platform level hash + m.update(GlobalData.gPlatformHash) + # Add Package level hash + if self.DependentPackageList: + for Pkg in self.DependentPackageList: + if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]: + m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName]) + + # Add Library hash + if self.LibraryAutoGenList: + for Lib in self.LibraryAutoGenList: + if Lib.Name not in GlobalData.gModuleHash[self.Arch]: + Lib.GenModuleHash() + m.update(GlobalData.gModuleHash[self.Arch][Lib.Name]) + + # Add Module self + f = open(str(self.MetaFile), 'r') + Content = f.read() + f.close() + m.update(Content) + # Add Module's source files + if self.SourceFileList: + for File in self.SourceFileList: + f = open(str(File), 'r') + Content = f.read() + f.close() + m.update(Content) + + ModuleHashFile = path.join(self.BuildDir, self.Name + ".hash") + if self.Name not in GlobalData.gModuleHash[self.Arch]: + GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest() + if GlobalData.gBinCacheSource: + CacheValid = self.AttemptModuleCacheCopy() + if CacheValid: + return False + return SaveFileOnChange(ModuleHashFile, m.hexdigest(), True) + ## Decide whether we can skip the ModuleAutoGen process - # If any source file is newer than the modeule than we cannot skip + def CanSkipbyHash(self): + if GlobalData.gUseHashCache: + return not self.GenModuleHash() + + ## Decide whether we can skip the ModuleAutoGen process + # If any source file is newer than the module than we cannot skip # def CanSkip(self): if not os.path.exists(self.GetTimeStampPath()): @@ -4214,6 +4516,8 @@ class ModuleAutoGen(AutoGen): with open(self.GetTimeStampPath(),'r') as f: for source in f: source = source.rstrip('\n') + if not os.path.exists(source): + return False if source not in ModuleAutoGen.TimeDict : ModuleAutoGen.TimeDict[source] = os.stat(source)[8] if ModuleAutoGen.TimeDict[source] > DstTimeStamp: @@ -4261,6 +4565,7 @@ class ModuleAutoGen(AutoGen): IsBinaryModule = property(_IsBinaryModule) BuildDir = property(_GetBuildDir) OutputDir = property(_GetOutputDir) + FfsOutputDir = 
property(_GetFfsOutputDir) DebugDir = property(_GetDebugDir) MakeFileDir = property(_GetMakeFileDir) CustomMakefile = property(_GetCustomMakefile)
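The module-level pieces above (GenModuleHash, CanSkipbyHash, CopyModuleToCache, AttemptModuleCacheCopy) chain the digests: a module's hash folds in the platform hash, the hashes of its dependent packages and libraries, and the contents of its own INF and source files, and a previously cached binary is reused only while the stored digest still matches. A simplified sketch of that chaining and of the skip test, under assumed helper names (not the ModuleAutoGen API):

import hashlib

def chain_module_hash(platform_hash, package_hashes, library_hashes, file_paths):
    # Fold in the upstream digests first, then the module's own files
    # (MD5, as used by BaseTools in the hunks above; order simplified here).
    m = hashlib.md5()
    m.update(platform_hash.encode())
    for h in package_hashes + library_hashes:
        m.update(h.encode())
    for path in file_paths:
        with open(path, 'rb') as f:
            m.update(f.read())
    return m.hexdigest()

def can_skip_by_hash(new_hash, saved_hash_file):
    # Skip rebuilding only when the previously saved digest matches the new one.
    try:
        with open(saved_hash_file, 'r') as f:
            return f.read().strip() == new_hash
    except IOError:
        return False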