# @param SkuId SKU id from command line\r
#\r
def _Init(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,\r
- BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None, Fvs=None, Caps=None, SkuId='', UniFlag=None, \r
+ BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None, Fvs=None, Caps=None, SkuId='', UniFlag=None,\r
Progress=None, BuildModule=None):\r
if Fds is None:\r
Fds = []\r
\r
# Validate build target\r
if self.BuildTarget not in self.Platform.BuildTargets:\r
- EdkLogger.error("build", PARAMETER_INVALID, \r
+ EdkLogger.error("build", PARAMETER_INVALID,\r
ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"\r
% (self.BuildTarget, " ".join(self.Platform.BuildTargets)))\r
\r
# parse FDF file to get PCDs in it, if any\r
if not self.FdfFile:\r
self.FdfFile = self.Platform.FlashDefinition\r
- \r
+\r
EdkLogger.info("")\r
if self.ArchList:\r
EdkLogger.info('%-16s = %s' % ("Architecture(s)", ' '.join(self.ArchList)))\r
EdkLogger.info('%-16s = %s' % ("Build target", self.BuildTarget))\r
- EdkLogger.info('%-16s = %s' % ("Toolchain",self.ToolChain)) \r
- \r
+ EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))\r
+\r
EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.Platform))\r
if BuildModule:\r
EdkLogger.info('%-24s = %s' % ("Active Module", BuildModule))\r
- \r
+\r
if self.FdfFile:\r
EdkLogger.info('%-24s = %s' % ("Flash Image Definition", self.FdfFile))\r
\r
EdkLogger.verbose("\nFLASH_DEFINITION = %s" % self.FdfFile)\r
- \r
+\r
if Progress:\r
Progress.Start("\nProcessing meta-data")\r
- \r
+\r
if self.FdfFile:\r
#\r
# Mark now build in AutoGen Phase\r
#\r
- GlobalData.gAutoGenPhase = True \r
+ GlobalData.gAutoGenPhase = True\r
Fdf = FdfParser(self.FdfFile.Path)\r
Fdf.ParseFile()\r
GlobalData.gFdfParser = Fdf\r
if self.CapTargetList:\r
EdkLogger.info("No flash definition file found. Capsule [%s] will be ignored." % " ".join(self.CapTargetList))\r
self.CapTargetList = []\r
- \r
+\r
# apply SKU and inject PCDs from Flash Definition file\r
for Arch in self.ArchList:\r
Platform = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]\r
Pa.CollectPlatformDynamicPcds()\r
Pa.CollectFixedAtBuildPcds()\r
self.AutoGenObjectList.append(Pa)\r
- \r
+\r
#\r
# Check PCDs token value conflict in each DEC file.\r
#\r
self._CheckAllPcdsTokenValueConflict()\r
- \r
+\r
#\r
# Check PCD type and definition between DSC and DEC\r
#\r
#\r
# Get INF file GUID\r
#\r
- InfFoundFlag = False \r
+ InfFoundFlag = False\r
for Pa in self.AutoGenObjectList:\r
if InfFoundFlag:\r
break\r
_GuidDict[Module.Guid.upper()] = FfsFile\r
break\r
else:\r
- EdkLogger.error("build", \r
+ EdkLogger.error("build",\r
FORMAT_INVALID,\r
- "Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,\r
+ "Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,\r
FfsFile.CurrentLineContent,\r
_GuidDict[Module.Guid.upper()].CurrentLineNum,\r
_GuidDict[Module.Guid.upper()].CurrentLineContent,\r
InfPath = NormPath(FfsFile.InfFileName)\r
if not os.path.exists(InfPath):\r
EdkLogger.error('build', GENFDS_ERROR, "Non-existant Module %s !" % (FfsFile.InfFileName))\r
- \r
+\r
PathClassObj = PathClass(FfsFile.InfFileName, self.WorkspaceDir)\r
#\r
# Here we just need to get FILE_GUID from INF file, use 'COMMON' as ARCH attribute. and use \r
if not InfObj.Guid.upper() in _GuidDict.keys():\r
_GuidDict[InfObj.Guid.upper()] = FfsFile\r
else:\r
- EdkLogger.error("build", \r
+ EdkLogger.error("build",\r
FORMAT_INVALID,\r
- "Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,\r
+ "Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,\r
FfsFile.CurrentLineContent,\r
_GuidDict[InfObj.Guid.upper()].CurrentLineNum,\r
_GuidDict[InfObj.Guid.upper()].CurrentLineContent,\r
InfObj.Guid.upper()),\r
ExtraData=self.FdfFile)\r
InfFoundFlag = False\r
- \r
+\r
if FfsFile.NameGuid != None:\r
_CheckPCDAsGuidPattern = re.compile("^PCD\(.+\..+\)$")\r
- \r
+\r
#\r
# If the NameGuid reference a PCD name. \r
# The style must match: PCD(xxxx.yyy)\r
# First convert from CFormatGuid to GUID string\r
#\r
_PcdGuidString = GuidStructureStringToGuidString(PcdItem.DefaultValue)\r
- \r
+\r
if not _PcdGuidString:\r
#\r
# Then try Byte array.\r
#\r
_PcdGuidString = GuidStructureByteArrayToGuidString(PcdItem.DefaultValue)\r
- \r
+\r
if not _PcdGuidString:\r
#\r
# Not Byte array or CFormat GUID, raise error.\r
#\r
EdkLogger.error("build",\r
FORMAT_INVALID,\r
- "The format of PCD value is incorrect. PCD: %s , Value: %s\n"%(_PcdName, PcdItem.DefaultValue),\r
+ "The format of PCD value is incorrect. PCD: %s , Value: %s\n" % (_PcdName, PcdItem.DefaultValue),\r
ExtraData=self.FdfFile)\r
- \r
- if not _PcdGuidString.upper() in _GuidDict.keys(): \r
+\r
+ if not _PcdGuidString.upper() in _GuidDict.keys():\r
_GuidDict[_PcdGuidString.upper()] = FfsFile\r
PcdFoundFlag = True\r
break\r
else:\r
- EdkLogger.error("build", \r
+ EdkLogger.error("build",\r
FORMAT_INVALID,\r
- "Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,\r
+ "Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,\r
FfsFile.CurrentLineContent,\r
_GuidDict[_PcdGuidString.upper()].CurrentLineNum,\r
_GuidDict[_PcdGuidString.upper()].CurrentLineContent,\r
FfsFile.NameGuid.upper()),\r
- ExtraData=self.FdfFile) \r
- \r
+ ExtraData=self.FdfFile)\r
+\r
if not FfsFile.NameGuid.upper() in _GuidDict.keys():\r
_GuidDict[FfsFile.NameGuid.upper()] = FfsFile\r
else:\r
#\r
# Two raw file GUID conflict.\r
#\r
- EdkLogger.error("build", \r
+ EdkLogger.error("build",\r
FORMAT_INVALID,\r
- "Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,\r
+ "Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,\r
FfsFile.CurrentLineContent,\r
_GuidDict[FfsFile.NameGuid.upper()].CurrentLineNum,\r
_GuidDict[FfsFile.NameGuid.upper()].CurrentLineContent,\r
FfsFile.NameGuid.upper()),\r
ExtraData=self.FdfFile)\r
- \r
+\r
\r
def _CheckPcdDefineAndType(self):\r
PcdTypeList = [\r
# Key of DSC's Pcds dictionary is PcdCName, TokenSpaceGuid\r
for Pcd in Pa.Platform.Pcds:\r
PcdType = Pa.Platform.Pcds[Pcd].Type\r
- \r
+\r
# If no PCD type, this PCD comes from FDF \r
if not PcdType:\r
continue\r
- \r
+\r
# Try to remove Hii and Vpd suffix\r
if PcdType.startswith("DynamicEx"):\r
PcdType = "DynamicEx"\r
elif PcdType.startswith("Dynamic"):\r
PcdType = "Dynamic"\r
- \r
+\r
for Package in Pa.PackageList:\r
# Key of DEC's Pcds dictionary is PcdCName, TokenSpaceGuid, PcdType\r
if (Pcd[0], Pcd[1], PcdType) in Package.Pcds:\r
# BuildCommand should be all the same. So just get one from platform AutoGen\r
self._BuildCommand = self.AutoGenObjectList[0].BuildCommand\r
return self._BuildCommand\r
- \r
+\r
## Check the PCDs token value conflict in each DEC file.\r
#\r
# Will cause build break and raise error message while two PCDs conflict.\r
#\r
# Sort same token value PCD list with TokenGuid and TokenCName\r
#\r
- SameTokenValuePcdList.sort(lambda x, y: cmp("%s.%s"%(x.TokenSpaceGuidCName, x.TokenCName), "%s.%s"%(y.TokenSpaceGuidCName, y.TokenCName))) \r
- SameTokenValuePcdListCount = 0 \r
+ SameTokenValuePcdList.sort(lambda x, y: cmp("%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName), "%s.%s" % (y.TokenSpaceGuidCName, y.TokenCName)))\r
+ SameTokenValuePcdListCount = 0\r
while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):\r
- TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]\r
- TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1] \r
- \r
+ TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]\r
+ TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1]\r
+\r
if (TemListItem.TokenSpaceGuidCName == TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName != TemListItemNext.TokenCName):\r
EdkLogger.error(\r
'build',\r
SameTokenValuePcdListCount += 1\r
Count += SameTokenValuePcdListCount\r
Count += 1\r
- \r
+\r
PcdList = Package.Pcds.values()\r
- PcdList.sort(lambda x, y: cmp("%s.%s"%(x.TokenSpaceGuidCName, x.TokenCName), "%s.%s"%(y.TokenSpaceGuidCName, y.TokenCName)))\r
+ PcdList.sort(lambda x, y: cmp("%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName), "%s.%s" % (y.TokenSpaceGuidCName, y.TokenCName)))\r
Count = 0\r
while (Count < len(PcdList) - 1) :\r
Item = PcdList[Count]\r
- ItemNext = PcdList[Count + 1] \r
+ ItemNext = PcdList[Count + 1]\r
#\r
# Check PCDs with same TokenSpaceGuidCName.TokenCName have same token value as well.\r
#\r
"0x01001" : 3, # ******_TOOLCHAIN_****_***********_ATTRIBUTE\r
"0x10001" : 2, # TARGET_*********_****_***********_ATTRIBUTE\r
"0x00001" : 1} # ******_*********_****_***********_ATTRIBUTE (Lowest)\r
- \r
+\r
## The real constructor of PlatformAutoGen\r
#\r
# This method is not supposed to be called by users of PlatformAutoGen. It's\r
#GuidValue.update(M.Guids)\r
\r
self.Platform.Modules[F].M = M\r
- \r
- for PcdFromModule in M.ModulePcdList+M.LibraryPcdList:\r
+\r
+ for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:\r
# make sure that the "VOID*" kind of datum has MaxDatumSize set\r
if PcdFromModule.DatumType == "VOID*" and PcdFromModule.MaxDatumSize in [None, '']:\r
NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, F))\r
if (self.Workspace.ArchList[-1] == self.Arch): \r
for Pcd in self._DynamicPcdList:\r
# just pick the a value to determine whether is unicode string type\r
- Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]\r
+ Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]\r
Sku.VpdOffset = Sku.VpdOffset.strip()\r
- \r
+\r
PcdValue = Sku.DefaultValue\r
if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):\r
# if found PCD which datum value is unicode string the insert to left size of UnicodeIndex\r
else:\r
OtherPcdArray.append(Pcd)\r
if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:\r
- VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd \r
- \r
+ VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd\r
+\r
PlatformPcds = self.Platform.Pcds.keys()\r
- PlatformPcds.sort() \r
+ PlatformPcds.sort()\r
#\r
# Add VPD type PCD into VpdFile and determine whether the VPD PCD need to be fixed up.\r
#\r
if self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == '':\r
EdkLogger.error("Build", FILE_NOT_FOUND, \\r
"Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r
- \r
- \r
+\r
+\r
#\r
# Fix the PCDs define in VPD PCD section that never referenced by module.\r
# An example is PCD for signature usage.\r
if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \\r
(VpdPcd.TokenCName == DscPcdEntry.TokenCName):\r
FoundFlag = True\r
- \r
+\r
# Not found, it should be signature\r
if not FoundFlag :\r
# just pick the a value to determine whether is unicode string type\r
VpdFile.GetCount() != 0:\r
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, \r
"Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))\r
- \r
+\r
if VpdFile.GetCount() != 0:\r
DscTimeStamp = self.Platform.MetaFile.TimeStamp\r
FvPath = os.path.join(self.BuildDir, "FV")\r
os.makedirs(FvPath)\r
except:\r
EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)\r
- \r
- \r
+\r
+\r
VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)\r
\r
- \r
+\r
if not os.path.exists(VpdFilePath) or os.path.getmtime(VpdFilePath) < DscTimeStamp:\r
VpdFile.Write(VpdFilePath)\r
- \r
+\r
# retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.\r
BPDGToolName = None\r
for ToolDef in self.ToolDefinition.values():\r
VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)\r
else:\r
EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r
- \r
+\r
# Process VPD map file generated by third party BPDG tool\r
if NeedProcessVpdMapFile:\r
VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)\r
if os.path.exists(VpdMapFilePath):\r
VpdFile.Read(VpdMapFilePath)\r
- \r
+\r
# Fixup "*" offset\r
for Pcd in self._DynamicPcdList:\r
# just pick the a value to determine whether is unicode string type\r
i += 1\r
else:\r
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)\r
- \r
+\r
# Delete the DynamicPcdList At the last time enter into this function \r
- del self._DynamicPcdList[:] \r
+ del self._DynamicPcdList[:]\r
self._DynamicPcdList.extend(UnicodePcdArray)\r
self._DynamicPcdList.extend(HiiPcdArray)\r
self._DynamicPcdList.extend(OtherPcdArray)\r
else:\r
if self._BuildRule._FileVersion < AutoGenReqBuildRuleVerNum :\r
# If Build Rule's version is less than the version number required by the tools, halting the build.\r
- EdkLogger.error("build", AUTOGEN_ERROR, \r
+ EdkLogger.error("build", AUTOGEN_ERROR,\r
ExtraData="The version number [%s] of build_rule.txt is less than the version number required by the AutoGen.(the minimum required version number is [%s])"\\r
% (self._BuildRule._FileVersion, AutoGenReqBuildRuleVerNum))\r
- \r
+\r
return self._BuildRule\r
\r
## Summarize the packages used by modules in this platform\r
EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
TokenNumber += 1\r
- \r
+\r
for Pcd in self.DynamicPcdList:\r
if Pcd.Phase == "PEI":\r
if Pcd.Type in ["DynamicEx", "DynamicExDefault", "DynamicExVpd", "DynamicExHii"]:\r
EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
TokenNumber += 1\r
- \r
+\r
for Pcd in self.DynamicPcdList:\r
if Pcd.Phase == "DXE":\r
if Pcd.Type in ["Dynamic", "DynamicDefault", "DynamicVpd", "DynamicHii"]:\r
EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
TokenNumber += 1\r
- \r
+\r
for Pcd in self.DynamicPcdList:\r
if Pcd.Phase == "DXE":\r
if Pcd.Type in ["DynamicEx", "DynamicExDefault", "DynamicExVpd", "DynamicExHii"]:\r
EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
TokenNumber += 1\r
- \r
+\r
for Pcd in self.NonDynamicPcdList:\r
self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
TokenNumber += 1\r
elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\\r
and (ToPcd.Type != FromPcd.Type) and (ToPcd.Type in FromPcd.Type):\r
if ToPcd.Type.strip() == "DynamicEx":\r
- ToPcd.Type = FromPcd.Type \r
+ ToPcd.Type = FromPcd.Type\r
elif ToPcd.Type not in [None, ''] and FromPcd.Type not in [None, ''] \\r
and ToPcd.Type != FromPcd.Type:\r
EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",\r
#\r
def ApplyPcdSetting(self, Module, Pcds):\r
# for each PCD in module\r
- for Name,Guid in Pcds:\r
- PcdInModule = Pcds[Name,Guid]\r
+ for Name, Guid in Pcds:\r
+ PcdInModule = Pcds[Name, Guid]\r
# find out the PCD setting in platform\r
- if (Name,Guid) in self.Platform.Pcds:\r
- PcdInPlatform = self.Platform.Pcds[Name,Guid]\r
+ if (Name, Guid) in self.Platform.Pcds:\r
+ PcdInPlatform = self.Platform.Pcds[Name, Guid]\r
else:\r
PcdInPlatform = None\r
# then override the settings if any\r
# @retval Value Priority value based on the priority list.\r
#\r
def CalculatePriorityValue(self, Key):\r
- Target, ToolChain, Arch, CommandType, Attr = Key.split('_') \r
- PriorityValue = 0x11111 \r
+ Target, ToolChain, Arch, CommandType, Attr = Key.split('_')\r
+ PriorityValue = 0x11111\r
if Target == "*":\r
PriorityValue &= 0x01111\r
if ToolChain == "*":\r
PriorityValue &= 0x11101\r
if Attr == "*":\r
PriorityValue &= 0x11110\r
- \r
- return self.PrioList["0x%0.5x"%PriorityValue]\r
- \r
+\r
+ return self.PrioList["0x%0.5x" % PriorityValue]\r
+\r
\r
## Expand * in build option key\r
#\r
BuildOptions = {}\r
FamilyMatch = False\r
FamilyIsNull = True\r
- \r
+\r
OverrideList = {}\r
#\r
# Construct a list contain the build options which need override.\r
if ToolChain == self.ToolChain or ToolChain == "*":\r
if Arch == self.Arch or Arch == "*":\r
if Options[Key].startswith("="):\r
- if OverrideList.get(Key[1]) != None: \r
+ if OverrideList.get(Key[1]) != None:\r
OverrideList.pop(Key[1])\r
OverrideList[Key[1]] = Options[Key]\r
\r
# Use the highest priority value. \r
#\r
if (len(OverrideList) >= 2):\r
- KeyList = OverrideList.keys()\r
+ KeyList = OverrideList.keys()\r
for Index in range(len(KeyList)):\r
- NowKey = KeyList[Index]\r
+ NowKey = KeyList[Index]\r
Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")\r
for Index1 in range(len(KeyList) - Index - 1):\r
NextKey = KeyList[Index1 + Index + 1]\r
if CommandType1 == CommandType2 or CommandType1 == "*" or CommandType2 == "*":\r
if Attr1 == Attr2 or Attr1 == "*" or Attr2 == "*":\r
if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):\r
- if Options.get((self.BuildRuleFamily, NextKey)) != None: \r
+ if Options.get((self.BuildRuleFamily, NextKey)) != None:\r
Options.pop((self.BuildRuleFamily, NextKey))\r
else:\r
- if Options.get((self.BuildRuleFamily, NowKey)) != None: \r
+ if Options.get((self.BuildRuleFamily, NowKey)) != None:\r
Options.pop((self.BuildRuleFamily, NowKey))\r
\r
for Key in Options:\r
Family = Key[0]\r
Target, Tag, Arch, Tool, Attr = Key[1].split("_")\r
# if tool chain family doesn't match, skip it\r
- if Tool not in self.ToolDefinition or Family =="":\r
+ if Tool not in self.ToolDefinition or Family == "":\r
continue\r
# option has been added before\r
if Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_FAMILY]:\r
# is the former use /I , the Latter used -I to specify include directories\r
#\r
if self.PlatformInfo.ToolChainFamily in ('MSFT'):\r
- gBuildOptIncludePattern = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE|re.DOTALL)\r
+ gBuildOptIncludePattern = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)\r
elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):\r
- gBuildOptIncludePattern = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE|re.DOTALL)\r
+ gBuildOptIncludePattern = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)\r
else:\r
#\r
# New ToolChainFamily, don't known whether there is option to specify include directories\r
if self.AutoGenVersion >= 0x00010005 and len(IncPathList) > 0:\r
for Path in IncPathList:\r
if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):\r
- ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption) \r
- EdkLogger.error("build", \r
+ ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)\r
+ EdkLogger.error("build",\r
PARAMETER_INVALID,\r
- ExtraData = ErrMsg, \r
- File = str(self.MetaFile))\r
+ ExtraData=ErrMsg,\r
+ File=str(self.MetaFile))\r
\r
\r
BuildOptionIncPathList += IncPathList\r
if File.IsBinary and File == Source and self._BinaryFileList != None and File in self._BinaryFileList:\r
# Skip all files that are not binary libraries\r
if not self.IsLibrary:\r
- continue \r
+ continue\r
RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]\r
elif FileType in self.BuildRules:\r
RuleObject = self.BuildRules[FileType]\r
# Also find all packages that the DynamicEx PCDs depend on\r
Pcds = []\r
PatchablePcds = {}\r
- Packages = [] \r
+ Packages = []\r
PcdCheckList = []\r
PcdTokenSpaceList = []\r
for Pcd in self.ModulePcdList + self.LibraryPcdList:\r
'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],\r
'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],\r
'module_arch' : self.Arch,\r
- 'package_item' : ['%s' % (Package.MetaFile.File.replace('\\','/')) for Package in Packages],\r
+ 'package_item' : ['%s' % (Package.MetaFile.File.replace('\\', '/')) for Package in Packages],\r
'binary_item' : [],\r
'patchablepcd_item' : [],\r
'pcd_item' : [],\r
if 'PI_SPECIFICATION_VERSION' in self.Specification:\r
AsBuiltInfDict['module_pi_specification_version'] += [self.Specification['PI_SPECIFICATION_VERSION']]\r
\r
- OutputDir = self.OutputDir.replace('\\','/').strip('/')\r
+ OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r
if self.ModuleType in ['BASE', 'USER_DEFINED']:\r
for Item in self.CodaTargetList:\r
- File = Item.Target.Path.replace('\\','/').strip('/').replace(OutputDir,'').strip('/')\r
- if Item.Target.Ext.lower() == '.aml': \r
+ File = Item.Target.Path.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')\r
+ if Item.Target.Ext.lower() == '.aml':\r
AsBuiltInfDict['binary_item'] += ['ASL|' + File]\r
- elif Item.Target.Ext.lower() == '.acpi': \r
+ elif Item.Target.Ext.lower() == '.acpi':\r
AsBuiltInfDict['binary_item'] += ['ACPI|' + File]\r
else:\r
AsBuiltInfDict['binary_item'] += ['BIN|' + File]\r
else:\r
for Item in self.CodaTargetList:\r
- File = Item.Target.Path.replace('\\','/').strip('/').replace(OutputDir,'').strip('/')\r
- if Item.Target.Ext.lower() == '.efi': \r
+ File = Item.Target.Path.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')\r
+ if Item.Target.Ext.lower() == '.efi':\r
AsBuiltInfDict['binary_item'] += ['PE32|' + self.Name + '.efi']\r
else:\r
AsBuiltInfDict['binary_item'] += ['BIN|' + File]\r
if self.DepexGenerated:\r
if self.ModuleType in ['PEIM']:\r
AsBuiltInfDict['binary_item'] += ['PEI_DEPEX|' + self.Name + '.depex']\r
- if self.ModuleType in ['DXE_DRIVER','DXE_RUNTIME_DRIVER','DXE_SAL_DRIVER','UEFI_DRIVER']:\r
+ if self.ModuleType in ['DXE_DRIVER', 'DXE_RUNTIME_DRIVER', 'DXE_SAL_DRIVER', 'UEFI_DRIVER']:\r
AsBuiltInfDict['binary_item'] += ['DXE_DEPEX|' + self.Name + '.depex']\r
if self.ModuleType in ['DXE_SMM_DRIVER']:\r
AsBuiltInfDict['binary_item'] += ['SMM_DEPEX|' + self.Name + '.depex']\r
\r
# find the build_rule_version\r
if Line and Line[0] == "#" and Line.find(TAB_BUILD_RULE_VERSION) <> -1:\r
- if Line.find("=") <> -1 and Line.find("=") < (len(Line)-1) and (Line[(Line.find("=") + 1):]).split():\r
+ if Line.find("=") <> -1 and Line.find("=") < (len(Line) - 1) and (Line[(Line.find("=") + 1):]).split():\r
self._FileVersion = (Line[(Line.find("=") + 1):]).split()[0]\r
# skip empty or comment line\r
if Line == "" or Line[0] == "#":\r
if TokenList[0] == "BUILD":\r
if len(TokenList) == 1:\r
EdkLogger.error("build", FORMAT_INVALID, "Invalid rule section",\r
- File=self.RuleFile, Line=LineIndex+1,\r
+ File=self.RuleFile, Line=LineIndex + 1,\r
ExtraData=self.RuleContent[LineIndex])\r
\r
FileType = TokenList[1]\r
if FileType == '':\r
EdkLogger.error("build", FORMAT_INVALID, "No file type given",\r
- File=self.RuleFile, Line=LineIndex+1,\r
+ File=self.RuleFile, Line=LineIndex + 1,\r
ExtraData=self.RuleContent[LineIndex])\r
if self._FileTypePattern.match(FileType) == None:\r
- EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex+1,\r
+ EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,\r
ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type")\r
# new format: File-Type.Build-Type.Arch\r
else:\r
elif FileType != TokenList[0]:\r
EdkLogger.error("build", FORMAT_INVALID,\r
"Different file types are not allowed in the same rule section",\r
- File=self.RuleFile, Line=LineIndex+1,\r
+ File=self.RuleFile, Line=LineIndex + 1,\r
ExtraData=self.RuleContent[LineIndex])\r
if len(TokenList) > 1:\r
BuildType = TokenList[1]\r
if 'COMMON' in self._BuildTypeList and len(self._BuildTypeList) > 1:\r
EdkLogger.error("build", FORMAT_INVALID,\r
"Specific build types must not be mixed with common one",\r
- File=self.RuleFile, Line=LineIndex+1,\r
+ File=self.RuleFile, Line=LineIndex + 1,\r
ExtraData=self.RuleContent[LineIndex])\r
if 'COMMON' in self._ArchList and len(self._ArchList) > 1:\r
EdkLogger.error("build", FORMAT_INVALID,\r
"Specific ARCH must not be mixed with common one",\r
- File=self.RuleFile, Line=LineIndex+1,\r
+ File=self.RuleFile, Line=LineIndex + 1,\r
ExtraData=self.RuleContent[LineIndex])\r
\r
self._FileType = FileType\r
elif SectionType != Type:\r
EdkLogger.error("build", FORMAT_INVALID,\r
"Two different section types are not allowed in the same sub-section",\r
- File=self.RuleFile, Line=LineIndex+1,\r
+ File=self.RuleFile, Line=LineIndex + 1,\r
ExtraData=self.RuleContent[LineIndex])\r
\r
if len(TokenList) > 1:\r
if 'COMMON' in FamilyList and len(FamilyList) > 1:\r
EdkLogger.error("build", FORMAT_INVALID,\r
"Specific tool chain family should not be mixed with general one",\r
- File=self.RuleFile, Line=LineIndex+1,\r
+ File=self.RuleFile, Line=LineIndex + 1,\r
ExtraData=self.RuleContent[LineIndex])\r
if self._State not in self._StateHandler:\r
- EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex+1,\r
+ EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,\r
ExtraData="Unknown subsection: %s" % self.RuleContent[LineIndex])\r
## Parse <InputFile> sub-section\r
#\r
# don't generate depex if only TRUE operand left\r
if self.ModuleType == 'PEIM' and len(NewOperand) == 1 and NewOperand[0] == 'TRUE':\r
self.PostfixNotation = []\r
- return \r
+ return\r
\r
# don't generate depex if all operands are architecture protocols\r
if self.ModuleType in ['UEFI_DRIVER', 'DXE_DRIVER', 'DXE_RUNTIME_DRIVER', 'DXE_SAL_DRIVER', 'DXE_SMM_DRIVER'] and \\r
Dpx = DependencyExpression(DxsString, Option.ModuleType, Option.Optimize)\r
if Option.OutputFile != None:\r
FileChangeFlag = Dpx.Generate(Option.OutputFile)\r
- if not FileChangeFlag and DxsFile: \r
+ if not FileChangeFlag and DxsFile:\r
#\r
# Touch the output file if its time stamp is older than the original\r
# DXS file to avoid re-invoke this tool for the dependency check in build rule.\r
import Common.GlobalData as GlobalData\r
\r
## Regular expression for finding header file inclusions\r
-gIncludePattern = re.compile(r"^[ \t]*#?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE|re.UNICODE|re.IGNORECASE)\r
+gIncludePattern = re.compile(r"^[ \t]*#?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE | re.UNICODE | re.IGNORECASE)\r
\r
## Regular expression for matching macro used in header file inclusion\r
gMacroPattern = re.compile("([_A-Z][_A-Z0-9]*)[ \t]*\((.+)\)", re.UNICODE)\r
\r
# convert source files and binary files to build targets\r
self.ResultFileList = [str(T.Target) for T in self._AutoGenObject.CodaTargetList]\r
- if len(self.ResultFileList) == 0 and len(self._AutoGenObject.SourceFileList) <> 0: \r
+ if len(self.ResultFileList) == 0 and len(self._AutoGenObject.SourceFileList) <> 0:\r
EdkLogger.error("build", AUTOGEN_ERROR, "Nothing to build",\r
ExtraData="[%s]" % str(self._AutoGenObject))\r
\r
FileMacro = ""\r
IncludePathList = []\r
for P in self._AutoGenObject.IncludePathList:\r
- IncludePathList.append(IncPrefix+self.PlaceMacro(P, self.Macros))\r
+ IncludePathList.append(IncPrefix + self.PlaceMacro(P, self.Macros))\r
if FileBuildRule.INC_LIST_MACRO in self.ListFileMacros:\r
- self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix+P)\r
+ self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix + P)\r
FileMacro += self._FILE_MACRO_TEMPLATE.Replace(\r
{\r
"macro_name" : "INC",\r
\r
# Generate macros used to represent files containing list of input files\r
for ListFileMacro in self.ListFileMacros:\r
- ListFileName = os.path.join(self._AutoGenObject.OutputDir, "%s.lst" % ListFileMacro.lower()[:len(ListFileMacro)-5])\r
+ ListFileName = os.path.join(self._AutoGenObject.OutputDir, "%s.lst" % ListFileMacro.lower()[:len(ListFileMacro) - 5])\r
FileMacroList.append("%s = %s" % (ListFileMacro, ListFileName))\r
SaveFileOnChange(\r
ListFileName,\r
try:\r
Fd = open(F.Path, 'r')\r
except BaseException, X:\r
- EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path+"\n\t"+str(X))\r
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))\r
\r
FileContent = Fd.read()\r
Fd.close()\r
\r
DbTotal = [InitValueUint64, VardefValueUint64, InitValueUint32, VardefValueUint32, VpdHeadValue, ExMapTable, \r
LocalTokenNumberTable, GuidTable, StringHeadValue, PcdNameOffsetTable,VariableTable,SkuTable, StringTableLen, PcdTokenTable,PcdCNameTable, \r
- SizeTableValue, InitValueUint16, VardefValueUint16,InitValueUint8, VardefValueUint8, InitValueBoolean,\r
+ SizeTableValue, InitValueUint16, VardefValueUint16, InitValueUint8, VardefValueUint8, InitValueBoolean,\r
VardefValueBoolean, SkuidValue, SkuIndexValue, UnInitValueUint64, UnInitValueUint32, UnInitValueUint16, UnInitValueUint8, UnInitValueBoolean]\r
DbItemTotal = [DbInitValueUint64, DbVardefValueUint64, DbInitValueUint32, DbVardefValueUint32, DbVpdHeadValue, DbExMapTable, \r
DbLocalTokenNumberTable, DbGuidTable, DbStringHeadValue, DbPcdNameOffsetTable,DbVariableTable,DbSkuTable, DbStringTableLen, DbPcdTokenTable, DbPcdCNameTable, \r
- DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16,DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,\r
+ DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16, DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,\r
DbVardefValueBoolean, DbSkuidValue, DbSkuIndexValue, DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean]\r
\r
# SkuidValue is the last table in the init table items\r
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)\r
elif Sku.DefaultValue[0] == '"':\r
DefaultValueBinStructure = StringToArray(Sku.DefaultValue)\r
- Size = len(Sku.DefaultValue) -2 + 1\r
+ Size = len(Sku.DefaultValue) - 2 + 1\r
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)\r
elif Sku.DefaultValue[0] == '{':\r
DefaultValueBinStructure = StringToArray(Sku.DefaultValue)\r
Pcd.InitString = 'INIT'\r
else:\r
if int(Sku.DefaultValue, 0) != 0:\r
- Pcd.InitString = 'INIT' \r
+ Pcd.InitString = 'INIT'\r
#\r
# For UNIT64 type PCD's value, ULL should be append to avoid\r
# warning under linux building environment.\r
# @retval: A list for formatted hex string\r
#\r
def DecToHexList(Dec, Digit = 8):\r
- Hex = eval("'%0" + str(Digit) + "X' % int(Dec)" )\r
+ Hex = eval("'%0" + str(Digit) + "X' % int(Dec)")\r
List = []\r
for Bit in range(Digit - 2, -1, -2):\r
List.append(HexHeader + Hex[Bit:Bit + 2])\r
Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED\r
UnusedStr = WriteLine(UnusedStr, Line)\r
\r
- Str = ''.join([Str,UnusedStr])\r
+ Str = ''.join([Str, UnusedStr])\r
\r
Str = WriteLine(Str, '')\r
if IsCompatibleMode or UniGenCFlag:\r
#\r
def CreateBinBuffer(BinBuffer, Array):
    ## Serialize an array of hex-string byte values into a binary buffer.
    #
    # @param BinBuffer  A writable binary buffer object.
    # @param Array      Byte values expressed as base-16 strings (e.g. "0x1A").
    #
    for HexText in Array:
        ByteValue = int(HexText, 16)
        BinBuffer.write(pack("B", ByteValue))
\r
## Create a formatted string all items in an array\r
#\r
Index = Index + 1\r
else:\r
ArrayItem = WriteLine(ArrayItem, Line)\r
- Line = ' ' + Item + ', '\r
+ Line = ' ' + Item + ', '\r
Index = 1\r
ArrayItem = Write(ArrayItem, Line.rstrip())\r
\r
\r
if PrimaryTag == UniLanguagePrimaryTag:\r
if UniLanguage not in UniLanguageListFiltered:\r
- UniLanguageListFiltered += [UniLanguage] \r
+ UniLanguageListFiltered += [UniLanguage]\r
break\r
else:\r
# Here is rule 3 for "get best language"\r
\r
UniLanguageList = []\r
for IndexI in range(len(UniObjectClass.LanguageDef)):\r
- UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]] \r
+ UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]]\r
\r
UniLanguageListFiltered = GetFilteredLanguage(UniLanguageList, LanguageFilterList)\r
\r
if UniBinBuffer:\r
CreateBinBuffer (UniBinBuffer, List)\r
UniBinBuffer.write (StringBuffer.getvalue())\r
- UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END,16)))\r
+ UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END, 16)))\r
StringBuffer.close()\r
\r
#\r
# Create line for string variable name\r
# "unsigned char $(BaseName)Strings[] = {"\r
#\r
- AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName + COMMON_FILE_NAME + '[] = {\n' )\r
+ AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName + COMMON_FILE_NAME + '[] = {\n')\r
\r
if IsCompatibleMode:\r
#\r
# Write an item\r
#\r
def Write(Target, Item):
    ## Append Item to Target and return the combined string.
    return Target + Item
\r
#\r
# Write an item with a break line\r
#\r
def WriteLine(Target, Item):
    ## Append Item plus a trailing newline to Target and return the result.
    return Target + Item + '\n'
\r
# This acts like the main() function for the script, unless it is 'import'ed into another\r
# script.\r
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File);\r
LineNo = GetLineNo(FileIn, Line, False)\r
EdkLogger.error("Unicode File Parser", PARSER_ERROR, "Wrong language definition",\r
- ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line, File = File, Line = LineNo)\r
+ ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line, File=File, Line=LineNo)\r
else:\r
LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File)\r
LangPrintName = Lang[2]\r
if Name != '':\r
MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)\r
if MatchString == None or MatchString.end(0) != len(Name):\r
- EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' %(Name, self.File))\r
+ EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' % (Name, self.File))\r
LanguageList = Item.split(u'#language ')\r
for IndexI in range(len(LanguageList)):\r
if IndexI == 0:\r
if not self.IsCompatibleMode and Name != '':\r
MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)\r
if MatchString == None or MatchString.end(0) != len(Name):\r
- EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' %(Name, self.File))\r
+ EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' % (Name, self.File))\r
self.AddStringToList(Name, Language, Value)\r
continue\r
\r
ItemIndexInList = self.OrderedStringDict[Language][Name]\r
Item = self.OrderedStringList[Language][ItemIndexInList]\r
Item.UpdateValue(Value)\r
- Item.UseOtherLangDef = '' \r
+ Item.UseOtherLangDef = ''\r
\r
if IsAdded:\r
Token = len(self.OrderedStringList[Language])\r
self.PcdBinSize = PcdBinSize\r
\r
if self.PcdValue == '' :\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Invalid PCD format(Name: %s File: %s line: %s) , no Value specified!" %(self.PcdCName, self.FileName, self.Lineno))\r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Invalid PCD format(Name: %s File: %s line: %s) , no Value specified!" % (self.PcdCName, self.FileName, self.Lineno))\r
+\r
if self.PcdOffset == '' :\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Invalid PCD format(Name: %s File: %s Line: %s) , no Offset specified!" %(self.PcdCName, self.FileName, self.Lineno))\r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Invalid PCD format(Name: %s File: %s Line: %s) , no Offset specified!" % (self.PcdCName, self.FileName, self.Lineno))\r
+\r
if self.PcdSize == '' :\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Invalid PCD format(Name: %s File: %s Line: %s), no PcdSize specified!" %(self.PcdCName, self.FileName, self.Lineno)) \r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Invalid PCD format(Name: %s File: %s Line: %s), no PcdSize specified!" % (self.PcdCName, self.FileName, self.Lineno))\r
+\r
self._GenOffsetValue ()\r
- \r
+\r
## Analyze the string value to judge whether the PCD's datum type is equal to Boolean or not.
# \r
# @param ValueString PCD's value\r
if ValueString.upper() in ["TRUE", "FALSE"]:\r
return True\r
elif ValueString in ["0", "1", "0x0", "0x1", "0x00", "0x01"]:\r
- return True \r
- \r
+ return True\r
+\r
return False\r
- \r
+\r
## Convert the PCD's value from string to integer.\r
# \r
# This function will try to convert the Offset value form string to integer\r
try:\r
self.PcdBinOffset = int(self.PcdOffset, 16)\r
except:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Invalid offset value %s for PCD %s (File: %s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno)) \r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Invalid offset value %s for PCD %s (File: %s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno))\r
+\r
## Pack Boolean type VPD PCD's value form string to binary type.\r
# \r
# @param ValueString The boolean type string for pack.\r
# \r
def _PackBooleanValue(self, ValueString):\r
if ValueString.upper() == "TRUE" or ValueString in ["1", "0x1", "0x01"]:\r
- try: \r
- self.PcdValue = pack(_FORMAT_CHAR[1], 1)\r
+ try:\r
+ self.PcdValue = pack(_FORMAT_CHAR[1], 1)\r
except:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno)) \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))\r
else:\r
try:\r
- self.PcdValue = pack(_FORMAT_CHAR[1], 0)\r
+ self.PcdValue = pack(_FORMAT_CHAR[1], 0)\r
except:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno)) \r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))\r
+\r
## Pack Integer type VPD PCD's value form string to binary type.\r
# \r
# @param ValueString The Integer type string for pack.\r
# \r
def _PackIntValue(self, IntValue, Size):\r
if Size not in _FORMAT_CHAR.keys():\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno)) \r
- \r
- if Size == 1: \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))\r
+\r
+ if Size == 1:\r
if IntValue < 0:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
"PCD can't be set to negative value %d for PCD %s in UINT8 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))\r
elif IntValue >= 0x100:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Too large PCD value %d for datum type UINT8 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno)) \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Too large PCD value %d for datum type UINT8 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))\r
elif Size == 2:\r
if IntValue < 0:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
"PCD can't be set to negative value %d for PCD %s in UINT16 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))\r
elif IntValue >= 0x10000:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Too large PCD value %d for datum type UINT16 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno)) \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Too large PCD value %d for datum type UINT16 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))\r
elif Size == 4:\r
if IntValue < 0:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
"PCD can't be set to negative value %d for PCD %s in UINT32 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))\r
elif IntValue >= 0x100000000:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
"Too large PCD value %d for datum type UINT32 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))\r
elif Size == 8:\r
if IntValue < 0:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
"PCD can't be set to negative value %d for PCD %s in UINT32 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))\r
elif IntValue >= 0x10000000000000000:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
"Too large PCD value %d for datum type UINT32 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))\r
else:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno)) \r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))\r
+\r
try:\r
- self.PcdValue = pack(_FORMAT_CHAR[Size], IntValue)\r
+ self.PcdValue = pack(_FORMAT_CHAR[Size], IntValue)\r
except:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno)) \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))\r
\r
## Pack VOID* type VPD PCD's value form string to binary type.\r
#\r
elif ValueString.startswith('"') and ValueString.endswith('"'):\r
self._PackString(ValueString, Size)\r
else:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Invalid VOID* type PCD %s value %s (File: %s Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno)) \r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Invalid VOID* type PCD %s value %s (File: %s Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno))\r
+\r
## Pack an Ascii PCD value.\r
# \r
# An Ascii string for a PCD should be in format as "".\r
# \r
def _PackString(self, ValueString, Size):\r
if (Size < 0):\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
"Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))\r
if (ValueString == ""):\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno)) \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno))\r
if (len(ValueString) < 2):\r
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD: %s ,ASCII string %s at least contains two!(File: %s Line: %s)" % (self.PcdCName, self.PcdUnpackValue, self.FileName, self.Lineno))\r
- \r
+\r
ValueString = ValueString[1:-1]\r
if len(ValueString) + 1 > Size:\r
- EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW, \r
+ EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,\r
"PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))\r
try:\r
- self.PcdValue= pack('%ds' % Size, ValueString)\r
+ self.PcdValue = pack('%ds' % Size, ValueString)\r
except:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno)) \r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))\r
+\r
## Pack a byte-array PCD value.\r
# \r
# A byte-array for a PCD should be in format as {0x01, 0x02, ...}.\r
# \r
def _PackByteArray(self, ValueString, Size):\r
- if (Size < 0): \r
+ if (Size < 0):\r
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))\r
if (ValueString == ""):\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno)) \r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno))\r
+\r
ValueString = ValueString.strip()\r
ValueString = ValueString.lstrip('{').strip('}')\r
ValueList = ValueString.split(',')\r
ValueList = [item.strip() for item in ValueList]\r
- \r
+\r
if len(ValueList) > Size:\r
- EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW, \r
+ EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,\r
"The byte array %s is too large for size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))\r
- \r
+\r
ReturnArray = array.array('B')\r
- \r
+\r
for Index in xrange(len(ValueList)):\r
Value = None\r
if ValueList[Index].lower().startswith('0x'):\r
try:\r
Value = int(ValueList[Index], 16)\r
except:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
"The value item %s in byte array %s is an invalid HEX value.(File: %s Line: %s)" % \\r
(ValueList[Index], ValueString, self.FileName, self.Lineno))\r
else:\r
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
"The value item %s in byte array %s is an invalid DECIMAL value.(File: %s Line: %s)" % \\r
(ValueList[Index], ValueString, self.FileName, self.Lineno))\r
- \r
+\r
if Value > 255:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
- "The value item %s in byte array %s do not in range 0 ~ 0xFF(File: %s Line: %s)" %\\r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
+ "The value item %s in byte array %s do not in range 0 ~ 0xFF(File: %s Line: %s)" % \\r
(ValueList[Index], ValueString, self.FileName, self.Lineno))\r
- \r
+\r
ReturnArray.append(Value)\r
- \r
+\r
for Index in xrange(len(ValueList), Size):\r
ReturnArray.append(0)\r
- \r
- self.PcdValue = ReturnArray.tolist()\r
+\r
+ self.PcdValue = ReturnArray.tolist()\r
\r
## Pack a unicode PCD value into byte array.\r
# \r
# A unicode string for a PCD should be in format as L"".\r
#\r
def _PackUnicode(self, UnicodeString, Size):\r
- if (Size < 0): \r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" %\\r
+ if (Size < 0):\r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % \\r
(self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))\r
if (len(UnicodeString) < 3):\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD: %s ,ASCII string %s at least contains two!(File: %s Line: %s)" %\\r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD: %s ,ASCII string %s at least contains two!(File: %s Line: %s)" % \\r
(self.PcdCName, self.PcdUnpackValue, self.FileName, self.Lineno))\r
- \r
+\r
UnicodeString = UnicodeString[2:-1]\r
- \r
+\r
if (len(UnicodeString) + 1) * 2 > Size:\r
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,\r
"The size of unicode string %s is too larger for size %s(File: %s Line: %s)" % \\r
(UnicodeString, Size, self.FileName, self.Lineno))\r
- \r
+\r
ReturnArray = array.array('B')\r
for Value in UnicodeString:\r
try:\r
ReturnArray.append(ord(Value))\r
ReturnArray.append(0)\r
except:\r
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, \r
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
"Invalid unicode character %s in unicode string %s(File: %s Line: %s)" % \\r
(Value, UnicodeString, self.FileName, self.Lineno))\r
- \r
+\r
for Index in xrange(len(UnicodeString) * 2, Size):\r
ReturnArray.append(0)\r
- \r
- self.PcdValue = ReturnArray.tolist() \r
+\r
+ self.PcdValue = ReturnArray.tolist()\r
\r
\r
\r
# 3. Fixed offset if needed;\r
# 4. Generate output file, including guided.map and guided.bin file;\r
# \r
-class GenVPD : \r
+class GenVPD :\r
## Constructor of DscBuildData\r
#\r
# Initialize object of GenVPD\r
try:\r
self.FileLinesList = fInputfile.readlines()\r
except:\r
- EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE, "File read failed for %s" %InputFileName,None)\r
+ EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE, "File read failed for %s" % InputFileName, None)\r
finally:\r
fInputfile.close()\r
except:\r
- EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %InputFileName,None)\r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % InputFileName, None)\r
+\r
##\r
# Parse the input file which is generated by the build tool. Convert the value of each pcd
# from string to its real format. Also remove the useless lines in the input file.
# \r
def ParserInputFile (self):\r
- count = 0 \r
+ count = 0\r
for line in self.FileLinesList:\r
# Strip "\r\n" generated by readlines ().\r
line = line.strip()\r
line = line.rstrip(os.linesep)\r
- \r
+\r
# Skip the comment line\r
if (not line.startswith("#")) and len(line) > 1 :\r
#\r
# Enhanced for support "|" character in the string.\r
#\r
ValueList = ['', '', '', '',''] \r
- \r
- ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')\r
+\r
+ ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')\r
PtrValue = ValueRe.findall(line)\r
- \r
+\r
ValueUpdateFlag = False\r
- \r
+\r
if len(PtrValue) >= 1:\r
line = re.sub(ValueRe, '', line)\r
- ValueUpdateFlag = True \r
- \r
+ ValueUpdateFlag = True\r
+\r
TokenList = line.split('|')\r
ValueList[0:len(TokenList)] = TokenList\r
- \r
+\r
if ValueUpdateFlag:\r
ValueList[4] = PtrValue[0] \r
self.FileLinesList[count] = ValueList\r
# Store the line number\r
- self.FileLinesList[count].append(str(count+1))\r
+ self.FileLinesList[count].append(str(count + 1))\r
elif len(line) <= 1 :\r
# Set the blank line to "None"\r
self.FileLinesList[count] = None\r
# Set the comment line to "None"\r
self.FileLinesList[count] = None\r
count += 1\r
- \r
+\r
# The line count contain usage information\r
- count = 0 \r
+ count = 0\r
# Delete useless lines\r
while (True) :\r
try :\r
else :\r
count += 1\r
except :\r
- break \r
+ break\r
#\r
# After remove the useless line, if there are no data remain in the file line list,\r
# Report warning messages to user's.\r
# \r
if len(self.FileLinesList) == 0 :\r
- EdkLogger.warn('BPDG', BuildToolError.RESOURCE_NOT_AVAILABLE, \r
+ EdkLogger.warn('BPDG', BuildToolError.RESOURCE_NOT_AVAILABLE,\r
"There are no VPD type pcds defined in DSC file, Please check it.")\r
- \r
+\r
# Process the pcds one by one base on the pcd's value and size\r
count = 0\r
- for line in self.FileLinesList: \r
+ for line in self.FileLinesList:\r
if line != None :\r
PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4],line[5], self.InputFileName) \r
# Strip the space char\r
PCD.PcdBinSize = PackSize\r
except:\r
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD size value %s at file: %s line: %s" % (PCD.PcdSize, self.InputFileName, PCD.Lineno))\r
- \r
+\r
if PCD._IsBoolean(PCD.PcdValue, PCD.PcdSize):\r
PCD._PackBooleanValue(PCD.PcdValue)\r
self.FileLinesList[count] = PCD\r
# Try to translate value to an integer firstly.\r
#\r
IsInteger = True\r
- PackValue = None\r
+ PackValue = None\r
try:\r
PackValue = int(PCD.PcdValue)\r
except:\r
PackValue = int(PCD.PcdValue, 16)\r
except:\r
IsInteger = False\r
- \r
+\r
if IsInteger:\r
PCD._PackIntValue(PackValue, PackSize)\r
else:\r
PCD._PackPtrValue(PCD.PcdValue, PackSize)\r
- \r
+\r
self.FileLinesList[count] = PCD\r
count += 1\r
else :\r
continue\r
- \r
+\r
##\r
# This function used to create a clean list only contain useful information and reorganized to make it \r
# easy to be sorted\r
#\r
def FormatFileLine (self) :\r
- \r
+\r
for eachPcd in self.FileLinesList :\r
if eachPcd.PcdOffset != '*' :\r
# Use pcd's Offset value as key, and pcd's Value as value \r
else :\r
# Use pcd's CName as key, and pcd's Size as value\r
self.PcdUnknownOffsetList.append(eachPcd)\r
- \r
- \r
+\r
+\r
##\r
# This function is use to fix the offset value which the not specified in the map file.\r
# Usually it use the star (meaning any offset) character in the offset field\r
# \r
- def FixVpdOffset (self): \r
+ def FixVpdOffset (self):\r
# At first, the offset should start at 0\r
# Sort fixed offset list in order to find out where has free spaces for the pcd's offset\r
# value is "*" to insert into. \r
- \r
- self.PcdFixedOffsetSizeList.sort(lambda x,y: cmp(x.PcdBinOffset, y.PcdBinOffset)) \r
- \r
+\r
+ self.PcdFixedOffsetSizeList.sort(lambda x, y: cmp(x.PcdBinOffset, y.PcdBinOffset))\r
+\r
#\r
# Sort the un-fixed pcd's offset by it's size.\r
#\r
- self.PcdUnknownOffsetList.sort(lambda x,y: cmp(x.PcdBinSize, y.PcdBinSize))\r
- \r
+ self.PcdUnknownOffsetList.sort(lambda x, y: cmp(x.PcdBinSize, y.PcdBinSize))\r
+\r
#\r
# Process all Offset value are "*"\r
#\r
if (len(self.PcdFixedOffsetSizeList) == 0) and (len(self.PcdUnknownOffsetList) != 0) :\r
# The offset start from 0\r
NowOffset = 0\r
- for Pcd in self.PcdUnknownOffsetList : \r
+ for Pcd in self.PcdUnknownOffsetList :\r
Pcd.PcdBinOffset = NowOffset\r
Pcd.PcdOffset = str(hex(Pcd.PcdBinOffset))\r
NowOffset += Pcd.PcdBinSize\r
\r
self.PcdFixedOffsetSizeList = self.PcdUnknownOffsetList\r
return\r
- \r
+\r
# Check the offset of VPD type pcd's offset start from 0. \r
- if self.PcdFixedOffsetSizeList[0].PcdBinOffset != 0 :\r
+ if self.PcdFixedOffsetSizeList[0].PcdBinOffset != 0 :\r
EdkLogger.warn("BPDG", "The offset of VPD type pcd should start with 0, please check it.",\r
- None) \r
- \r
+ None)\r
+\r
# Judge whether the offset in fixed pcd offset list is overlapped or not.\r
lenOfList = len(self.PcdFixedOffsetSizeList)\r
count = 0 \r
PcdNext = self.PcdFixedOffsetSizeList[count+1]\r
# Two pcd's offset is same \r
if PcdNow.PcdBinOffset == PcdNext.PcdBinOffset :\r
- EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE, \r
- "The offset of %s at line: %s is same with %s at line: %s in file %s" %\\r
+ EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,\r
+ "The offset of %s at line: %s is same with %s at line: %s in file %s" % \\r
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),\r
None)\r
- \r
+\r
# Overlapped \r
if PcdNow.PcdBinOffset + PcdNow.PcdBinSize > PcdNext.PcdBinOffset :\r
- EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE, \r
- "The offset of %s at line: %s is overlapped with %s at line: %s in file %s" %\\r
+ EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,\r
+ "The offset of %s at line: %s is overlapped with %s at line: %s in file %s" % \\r
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),\r
None)\r
- \r
+\r
# Has free space, raise a warning message \r
if PcdNow.PcdBinOffset + PcdNow.PcdBinSize < PcdNext.PcdBinOffset :\r
- EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE, \r
- "The offsets have free space of between %s at line: %s and %s at line: %s in file %s" %\\r
+ EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,\r
+ "The offsets have free space of between %s at line: %s and %s at line: %s in file %s" % \\r
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),\r
None)\r
count += 1\r
if LastOffset < NowOffset :\r
if lenOfUnfixedList != 0 :\r
countOfUnfixedList = 0\r
- while(countOfUnfixedList < lenOfUnfixedList) : \r
+ while(countOfUnfixedList < lenOfUnfixedList) :\r
eachUnfixedPcd = self.PcdUnknownOffsetList[countOfUnfixedList]\r
needFixPcdSize = eachUnfixedPcd.PcdBinSize\r
# Not been fixed\r
FixOffsetSizeListCount += 1\r
# Usually it will not enter into this thunk, if so, means it overlapped. \r
else :\r
- EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, \r
- "The offset value definition has overlapped at pcd: %s, it's offset is: %s, in file: %s line: %s" %\\r
+ EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,\r
+ "The offset value definition has overlapped at pcd: %s, it's offset is: %s, in file: %s line: %s" % \\r
(eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset, eachFixedPcd.InputFileName, eachFixedPcd.Lineno),\r
None)\r
FixOffsetSizeListCount += 1\r
#Open an VPD file to process\r
\r
try:\r
- fVpdFile = open (BinFileName, "wb", 0) \r
+ fVpdFile = open(BinFileName, "wb", 0)\r
except:\r
# Open failed\r
- EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %self.VpdFileName,None)\r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)\r
+\r
try :\r
- fMapFile = open (MapFileName, "w", 0)\r
+ fMapFile = open(MapFileName, "w", 0)\r
except:\r
# Open failed\r
- EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %self.MapFileName,None)\r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)\r
+\r
# Use a instance of StringIO to cache data\r
- fStringIO = StringIO.StringIO('') \r
- \r
+ fStringIO = StringIO.StringIO('')\r
+\r
# Write the header of map file.\r
try :\r
fMapFile.write (st.MAP_FILE_COMMENT_TEMPLATE + "\n")\r
except:\r
- EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.MapFileName,None) \r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)\r
+\r
for eachPcd in self.PcdFixedOffsetSizeList :\r
# write map file\r
try :\r
fMapFile.write("%s | %s | %s | %s | %s \n" % (eachPcd.PcdCName, eachPcd.SkuId,eachPcd.PcdOffset, eachPcd.PcdSize,eachPcd.PcdUnpackValue))\r
except:\r
- EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.MapFileName,None) \r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)\r
+\r
# Write Vpd binary file\r
- fStringIO.seek (eachPcd.PcdBinOffset) \r
+ fStringIO.seek (eachPcd.PcdBinOffset)\r
if isinstance(eachPcd.PcdValue, list):\r
ValueList = [chr(Item) for Item in eachPcd.PcdValue]\r
- fStringIO.write(''.join(ValueList)) \r
- else: \r
+ fStringIO.write(''.join(ValueList))\r
+ else:\r
fStringIO.write (eachPcd.PcdValue)\r
- \r
- try : \r
+\r
+ try :\r
fVpdFile.write (fStringIO.getvalue())\r
except:\r
- EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.VpdFileName,None)\r
- \r
+ EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.VpdFileName, None)\r
+\r
fStringIO.close ()\r
fVpdFile.close ()\r
fMapFile.close ()\r
#\r
def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):\r
try:\r
- F = open(FileName,'r')\r
+ F = open(FileName, 'r')\r
Keys = []\r
for Line in F:\r
if Line.startswith(CommentCharacter):\r
continue\r
- LineList = Line.split(KeySplitCharacter,1)\r
+ LineList = Line.split(KeySplitCharacter, 1)\r
if len(LineList) >= 2:\r
Key = LineList[0].split()\r
if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:\r
if ValueSplitFlag:\r
- Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter)\r
+ Dictionary[Key[0]] = LineList[1].replace('\\', '/').split(ValueSplitCharacter)\r
else:\r
- Dictionary[Key[0]] = LineList[1].strip().replace('\\','/')\r
+ Dictionary[Key[0]] = LineList[1].strip().replace('\\', '/')\r
Keys += [Key[0]]\r
F.close()\r
return 0\r
#\r
# Load TianoCoreOrgLogo, used for GUI tool\r
#\r
- self.Icon = wx.Icon(self.WorkspaceFile('tools/Python/TianoCoreOrgLogo.gif'),wx.BITMAP_TYPE_GIF)\r
+ self.Icon = wx.Icon(self.WorkspaceFile('tools/Python/TianoCoreOrgLogo.gif'), wx.BITMAP_TYPE_GIF)\r
except:\r
self.Icon = None\r
\r
def XmlParseFileSection (self, FileName, SectionTag):\r
if self.Verbose:\r
print FileName\r
- return XmlParseFileSection (self.WorkspaceFile(FileName), SectionTag) \r
+ return XmlParseFileSection (self.WorkspaceFile(FileName), SectionTag)\r
\r
## Save a XML file\r
#\r
#\r
def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
    ## Parse a "key <KeySplitCharacter> value" text file into Dictionary.
    #
    # Comment keys (starting with CommentCharacter) and repeated keys are
    # skipped; backslashes in values are normalized to forward slashes.
    #
    # @param FileName             Path of the text file to read.
    # @param Dictionary           Dict to receive the parsed key/value pairs.
    # @param CommentCharacter     Character that marks a comment key.
    # @param KeySplitCharacter    Separator between key and value.
    # @param ValueSplitFlag       If true, split the value on ValueSplitCharacter.
    # @param ValueSplitCharacter  Separator used inside the value when splitting.
    #
    # @retval True   File parsed successfully.
    # @retval False  File could not be opened.
    #
    try:
        F = open(FileName, 'r')
    except:
        return False
    SeenKeys = []
    for Line in F:
        Parts = Line.split(KeySplitCharacter, 1)
        if len(Parts) < 2:
            continue
        Key = Parts[0].split()
        # Skip malformed keys, comment keys, and keys already recorded.
        if len(Key) != 1 or Key[0][0] == CommentCharacter or Key[0] in SeenKeys:
            continue
        if ValueSplitFlag:
            Dictionary[Key[0]] = Parts[1].replace('\\', '/').split(ValueSplitCharacter)
        else:
            Dictionary[Key[0]] = Parts[1].strip().replace('\\', '/')
        SeenKeys.append(Key[0])
    F.close()
    return True
#\r
def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):\r
try:\r
- F = open(FileName,'r')\r
+ F = open(FileName, 'r')\r
Lines = []\r
Lines = F.readlines()\r
F.close()\r
MaxLength = len(Key)\r
Index = 0\r
for Line in Lines:\r
- LineList = Line.split(KeySplitCharacter,1)\r
+ LineList = Line.split(KeySplitCharacter, 1)\r
if len(LineList) >= 2:\r
Key = LineList[0].split()\r
if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] in Dictionary:\r
Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, Dictionary[Key[0]])\r
Lines.pop(Index)\r
if Key[0] in Keys:\r
- Lines.insert(Index,Line)\r
+ Lines.insert(Index, Line)\r
Keys.remove(Key[0])\r
Index += 1\r
for RemainingKey in Keys:\r
if ValueSplitFlag:\r
- Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter,' '.join(Dictionary[RemainingKey])) \r
+ Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, ' '.join(Dictionary[RemainingKey]))\r
else:\r
Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, Dictionary[RemainingKey])\r
Lines.append(Line)\r
try:\r
- F = open(FileName,'w')\r
+ F = open(FileName, 'w')\r
except:\r
return False\r
F.writelines(Lines)\r
# @param File The FDF name\r
# @param Line The Line number that error occurs\r
#\r
- def __init__(self, Str, File = None, Line = None):\r
- \r
+ def __init__(self, Str, File=None, Line=None):\r
+\r
FileLineTuple = GetRealFileLine(File, Line)\r
self.FileName = FileLineTuple[0]\r
self.LineNumber = FileLineTuple[1]\r
else:\r
raise Warning("Macro not complete At Line ", self.FileName, self.CurrentLineNumber)\r
return Str\r
- \r
- def __ReplaceFragment(self, StartPos, EndPos, Value = ' '):\r
+\r
+ def __ReplaceFragment(self, StartPos, EndPos, Value=' '):\r
if StartPos[0] == EndPos[0]:\r
Offset = StartPos[1]\r
while Offset <= EndPos[1]:\r
Description = CommonHeader.Description\r
License = CommonHeader.License\r
\r
- Header = "#/** @file\n#\n"\r
+ Header = "#/** @file\n#\n"\r
Header += "# " + Abstract + "\n#\n"\r
Header += "# " + Description.strip().replace("\n", "\n# ") + "\n"\r
Header += "# " + CopyRight + "\n#\n"\r
# @retval Options A optparse object containing the parsed options.\r
# @retval InputFile Path of an source file to be migrated.\r
#\r
-def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber = 1.0):\r
+def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber=1.0):\r
# use clearer usage to override default usage message\r
UsageString = "%s [-a] [-v|-q] [-o <output_file>] <input_file>" % ToolName\r
Version = "%s Version %.2f" % (ToolName, VersionNumber)\r
from Common.MultipleWorkspace import MultipleWorkspace as mws\r
\r
## Regular expression used to find out place holders in string template\r
-gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE|re.UNICODE)\r
+gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE | re.UNICODE)\r
\r
## Dictionary used to store file time stamp for quick re-access\r
gFileTimeStampCache = {} # {file path : file time stamp}\r
def GuidStringToGuidStructureString(Guid):
    """Convert a registry-format GUID string (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx)
    into a C initializer string of the form
    {0x..., 0x..., 0x..., {0x.., 0x.., 0x.., 0x.., 0x.., 0x.., 0x.., 0x..}}.
    """
    Fields = Guid.split('-')
    # First three fields map directly to the 32/16/16-bit members.
    Words = ['0x' + Fields[Part] for Part in range(3)]
    # Remaining 8 bytes: two from field 4, six from field 5, two hex digits each.
    Bytes = ['0x' + Fields[3][0:2], '0x' + Fields[3][2:4]]
    Bytes += ['0x' + Fields[4][Pos:Pos + 2] for Pos in range(0, 12, 2)]
    return '{%s, {%s}}' % (', '.join(Words), ', '.join(Bytes))
\r
Fd.write(Content)\r
Fd.close()\r
except IOError, X:\r
- EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s'%X)\r
+ EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)\r
\r
return True\r
\r
#\r
# @retval A list of all files\r
#\r
-def GetFiles(Root, SkipList=None, FullPath = True):\r
+def GetFiles(Root, SkipList=None, FullPath=True):\r
OriPath = Root\r
FileList = []\r
for Root, Dirs, Files in os.walk(Root):\r
if OverrideDir[-1] == os.path.sep:\r
return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)]\r
else:\r
- return NewFile[len(OverrideDir)+1:], NewFile[0:len(OverrideDir)]\r
+ return NewFile[len(OverrideDir) + 1:], NewFile[0:len(OverrideDir)]\r
if GlobalData.gAllFiles:\r
NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]\r
if not NewFile:\r
if Dir[-1] == os.path.sep:\r
return NewFile[len(Dir):], NewFile[0:len(Dir)]\r
else:\r
- return NewFile[len(Dir)+1:], NewFile[0:len(Dir)]\r
+ return NewFile[len(Dir) + 1:], NewFile[0:len(Dir)]\r
else:\r
return NewFile, ''\r
\r
# Replace the default dir to current dir\r
if Dir == '.':\r
Dir = os.getcwd()\r
- Dir = Dir[len(Workspace)+1:]\r
+ Dir = Dir[len(Workspace) + 1:]\r
\r
# First check if File has Edk definition itself\r
if File.find('$(EFI_SOURCE)') > -1 or File.find('$(EDK_SOURCE)') > -1:\r
# Dir is current module dir related to workspace\r
if Dir == '.':\r
Dir = os.getcwd()\r
- Dir = Dir[len(Workspace)+1:]\r
+ Dir = Dir[len(Workspace) + 1:]\r
\r
NewFile = File\r
RelaPath = AllFiles[os.path.normpath(Dir)]\r
#\r
# PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint\r
#\r
- for PlaceHolder,Start,End in PlaceHolderList:\r
+ for PlaceHolder, Start, End in PlaceHolderList:\r
self._SubSectionList.append(TemplateSection[SubSectionStart:Start])\r
self._SubSectionList.append(TemplateSection[Start:End])\r
self._PlaceHolderList.append(PlaceHolder)\r
if len(key) > 1:\r
RestKeys = key[1:]\r
elif self._Level_ > 1:\r
- RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]\r
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]\r
else:\r
FirstKey = key\r
if self._Level_ > 1:\r
- RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]\r
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]\r
\r
if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList:\r
FirstKey = self._Wildcard\r
if len(key) > 1:\r
RestKeys = key[1:]\r
else:\r
- RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]\r
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]\r
else:\r
FirstKey = key\r
if self._Level_ > 1:\r
- RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]\r
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]\r
\r
if FirstKey in self._ValidWildcardList:\r
FirstKey = self._Wildcard\r
Pair += 1\r
elif ch == ')' and not InStr:\r
Pair -= 1\r
- \r
+\r
if (Pair > 0 or InStr) and ch == TAB_VALUE_SPLIT:\r
NewStr += '-'\r
else:\r
IsValid = (len(FieldList) <= 3)\r
else:\r
IsValid = (len(FieldList) <= 1)\r
- return [Value, Type, Size], IsValid, 0 \r
+ return [Value, Type, Size], IsValid, 0\r
elif PcdType in (MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_VPD):\r
VpdOffset = FieldList[0]\r
Value = Size = ''\r
# \r
# @retval ValueList: A list containing the value, datum type and token number.
#\r
-def AnalyzePcdData(Setting): \r
- ValueList = ['', '', ''] \r
- \r
- ValueRe = re.compile(r'^\s*L?\".*\|.*\"')\r
+def AnalyzePcdData(Setting):\r
+ ValueList = ['', '', '']\r
+\r
+ ValueRe = re.compile(r'^\s*L?\".*\|.*\"')\r
PtrValue = ValueRe.findall(Setting)\r
\r
ValueUpdateFlag = False\r
\r
if len(PtrValue) >= 1:\r
Setting = re.sub(ValueRe, '', Setting)\r
- ValueUpdateFlag = True \r
+ ValueUpdateFlag = True\r
\r
TokenList = Setting.split(TAB_VALUE_SPLIT)\r
ValueList[0:len(TokenList)] = TokenList\r
# \r
# @retval ValueList: A list containing the VpdOffset, MaxDatumSize and InitialValue.
#\r
-def AnalyzeVpdPcdData(Setting): \r
- ValueList = ['', '', ''] \r
- \r
- ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')\r
+def AnalyzeVpdPcdData(Setting):\r
+ ValueList = ['', '', '']\r
+\r
+ ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')\r
PtrValue = ValueRe.findall(Setting)\r
\r
ValueUpdateFlag = False\r
\r
if len(PtrValue) >= 1:\r
Setting = re.sub(ValueRe, '', Setting)\r
- ValueUpdateFlag = True \r
+ ValueUpdateFlag = True\r
\r
TokenList = Setting.split(TAB_VALUE_SPLIT)\r
ValueList[0:len(TokenList)] = TokenList\r
#\r
def CheckPcdDatum(Type, Value):\r
if Type == "VOID*":\r
- ValueRe = re.compile(r'\s*L?\".*\"\s*$')\r
+ ValueRe = re.compile(r'\s*L?\".*\"\s*$')\r
if not (((Value.startswith('L"') or Value.startswith('"')) and Value.endswith('"'))\r
or (Value.startswith('{') and Value.endswith('}'))\r
):\r
return False, "Invalid value [%s] of type [%s]; must be in the form of {...} for array"\\r
- ", or \"...\" for string, or L\"...\" for unicode string" % (Value, Type) \r
+ ", or \"...\" for string, or L\"...\" for unicode string" % (Value, Type)\r
elif ValueRe.match(Value):\r
# Check the chars in UnicodeString or CString is printable\r
if Value.startswith("L"):\r
\r
if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]:\r
if Index > OptionStart:\r
- OptionList.append(OptionString[OptionStart:Index-1])\r
+ OptionList.append(OptionString[OptionStart:Index - 1])\r
OptionStart = Index\r
LastChar = CurrentChar\r
OptionList.append(OptionString[OptionStart:])\r
if self.Root[-1] == os.path.sep:\r
self.File = self.Path[len(self.Root):]\r
else:\r
- self.File = self.Path[len(self.Root)+1:]\r
+ self.File = self.Path[len(self.Root) + 1:]\r
else:\r
self.Path = os.path.normpath(self.File)\r
\r
# @var MacroDictionary: To store keys and values defined in DEFINE statement\r
#\r
class ToolDefClassObject(object):\r
- def __init__(self, FileName = None):\r
+ def __init__(self, FileName=None):\r
self.ToolsDefTxtDictionary = {}\r
self.MacroDictionary = {}\r
for Env in os.environ:\r
FileContent = []\r
if os.path.isfile(FileName):\r
try:\r
- F = open(FileName,'r')\r
+ F = open(FileName, 'r')\r
FileContent = F.readlines()\r
except:\r
EdkLogger.error("tools_def.txt parser", FILE_OPEN_FAILURE, ExtraData=FileName)\r
self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE].sort()\r
\r
KeyList = [TAB_TOD_DEFINES_TARGET, TAB_TOD_DEFINES_TOOL_CHAIN_TAG, TAB_TOD_DEFINES_TARGET_ARCH, TAB_TOD_DEFINES_COMMAND_TYPE]\r
- for Index in range(3,-1,-1):\r
+ for Index in range(3, -1, -1):\r
for Key in dict(self.ToolsDefTxtDictionary):\r
List = Key.split('_')\r
if List[Index] == '*':\r
import Common.GlobalData as GlobalData\r
from Common import EdkLogger\r
from Common.String import *\r
-from Common.Misc import DirCache,PathClass\r
+from Common.Misc import DirCache, PathClass\r
from Common.Misc import SaveFileOnChange\r
from Common.Misc import ClearDuplicatedInf\r
from Common.Misc import GuidStructureStringToGuidString\r
if 'EDK_SOURCE' in os.environ.keys():\r
GenFdsGlobalVariable.EdkSourceDir = os.path.normcase(os.environ['EDK_SOURCE'])\r
if (Options.debug):\r
- GenFdsGlobalVariable.VerboseLogger( "Using Workspace:" + Workspace)\r
+ GenFdsGlobalVariable.VerboseLogger("Using Workspace:" + Workspace)\r
os.chdir(GenFdsGlobalVariable.WorkSpaceDir)\r
\r
# set multiple workspace\r
\r
if FdfFilename[0:2] == '..':\r
FdfFilename = os.path.realpath(FdfFilename)\r
- if not os.path.isabs (FdfFilename):\r
+ if not os.path.isabs(FdfFilename):\r
FdfFilename = mws.join(GenFdsGlobalVariable.WorkSpaceDir, FdfFilename)\r
if not os.path.exists(FdfFilename):\r
EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=FdfFilename)\r
GenFds.DisplayFvSpaceInfo(FdfParserObj)\r
\r
except FdfParser.Warning, X:\r
- EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError = False)\r
+ EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)\r
ReturnCode = FORMAT_INVALID\r
except FatalError, X:\r
if Options.debug != None:\r
#\r
def myOptionParser():\r
usage = "%prog [options] -f input_file -a arch_list -b build_target -p active_platform -t tool_chain_tag -D \"MacroName [= MacroValue]\""\r
- Parser = OptionParser(usage=usage,description=__copyright__,version="%prog " + str(versionNumber))\r
+ Parser = OptionParser(usage=usage, description=__copyright__, version="%prog " + str(versionNumber))\r
Parser.add_option("-f", "--file", dest="filename", type="string", help="Name of FDF file to convert", action="callback", callback=SingleCheckCallback)\r
Parser.add_option("-a", "--arch", dest="archList", help="comma separated list containing one or more of: IA32, X64, IPF, ARM, AARCH64 or EBC which should be built, overrides target.txt?s TARGET_ARCH")\r
Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")\r
if UsedSizeValue == TotalSizeValue:\r
Percentage = '100'\r
else:\r
- Percentage = str((UsedSizeValue+0.0)/TotalSizeValue)[0:4].lstrip('0.') \r
- \r
+ Percentage = str((UsedSizeValue + 0.0) / TotalSizeValue)[0:4].lstrip('0.')\r
+\r
GenFdsGlobalVariable.InfLogger(Name + ' ' + '[' + Percentage + '%Full] ' + str(TotalSizeValue) + ' total, ' + str(UsedSizeValue) + ' used, ' + str(FreeSizeValue) + ' free')\r
\r
## PreprocessImage()\r
# @param ArchList The Arch list of platform\r
#\r
def SetDir (OutputDir, FdfParser, WorkSpace, ArchList):\r
- GenFdsGlobalVariable.VerboseLogger( "GenFdsGlobalVariable.OutputDir :%s" %OutputDir)\r
+ GenFdsGlobalVariable.VerboseLogger("GenFdsGlobalVariable.OutputDir :%s" % OutputDir)\r
# GenFdsGlobalVariable.OutputDirDict = OutputDir\r
GenFdsGlobalVariable.FdfParser = FdfParser\r
GenFdsGlobalVariable.WorkSpace = WorkSpace\r
# Create FV Address inf file\r
#\r
GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, 'FvAddress.inf')\r
- FvAddressFile = open (GenFdsGlobalVariable.FvAddressFileName, 'w')\r
+ FvAddressFile = open(GenFdsGlobalVariable.FvAddressFileName, 'w')\r
#\r
# Add [Options]\r
#\r
break\r
\r
FvAddressFile.writelines("EFI_BOOT_DRIVER_BASE_ADDRESS = " + \\r
- BsAddress + \\r
+ BsAddress + \\r
T_CHAR_LF)\r
\r
RtAddress = '0'\r
RtAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].RtBaseAddress\r
\r
FvAddressFile.writelines("EFI_RUNTIME_DRIVER_BASE_ADDRESS = " + \\r
- RtAddress + \\r
+ RtAddress + \\r
T_CHAR_LF)\r
\r
FvAddressFile.close()\r
CommandFile = Output + '.txt'\r
if Ui not in [None, '']:\r
#Cmd += ["-n", '"' + Ui + '"']\r
- SectionData = array.array('B', [0,0,0,0])\r
+ SectionData = array.array('B', [0, 0, 0, 0])\r
SectionData.fromstring(Ui.encode("utf_16_le"))\r
SectionData.append(0)\r
SectionData.append(0)\r
Len = len(SectionData)\r
GenFdsGlobalVariable.SectionHeader.pack_into(SectionData, 0, Len & 0xff, (Len >> 8) & 0xff, (Len >> 16) & 0xff, 0x15)\r
- SaveFileOnChange(Output, SectionData.tostring())\r
+ SaveFileOnChange(Output, SectionData.tostring())\r
elif Ver not in [None, '']:\r
Cmd += ["-n", Ver]\r
if BuildNumber:\r
Cmd = ["GenFv"]\r
if BaseAddress not in [None, '']:\r
Cmd += ["-r", BaseAddress]\r
- \r
+\r
if ForceRebase == False:\r
- Cmd +=["-F", "FALSE"]\r
+ Cmd += ["-F", "FALSE"]\r
elif ForceRebase == True:\r
- Cmd +=["-F", "TRUE"]\r
- \r
+ Cmd += ["-F", "TRUE"]\r
+\r
if Capsule:\r
Cmd += ["-c"]\r
if Dump:\r
if VendorId != None:\r
Cmd += ["-f", VendorId]\r
\r
- Cmd += ["-o", Output] \r
+ Cmd += ["-o", Output]\r
GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate option rom")\r
\r
@staticmethod\r
sys.stdout.write('\n')\r
\r
try:\r
- PopenObject = subprocess.Popen(' '.join(cmd), stdout=subprocess.PIPE, stderr= subprocess.PIPE, shell=True)\r
+ PopenObject = subprocess.Popen(' '.join(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)\r
except Exception, X:\r
EdkLogger.error("GenFds", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0]))\r
(out, error) = PopenObject.communicate()\r
returnValue[0] = PopenObject.returncode\r
return\r
if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1:\r
- GenFdsGlobalVariable.InfLogger ("Return Value = %d" %PopenObject.returncode)\r
+ GenFdsGlobalVariable.InfLogger ("Return Value = %d" % PopenObject.returncode)\r
GenFdsGlobalVariable.InfLogger (out)\r
GenFdsGlobalVariable.InfLogger (error)\r
if PopenObject.returncode != 0:\r
    ## InfLogger()
    #
    #   Forward an informational GenFds message to the common EdkLogger.
    #
    #   @param  msg     Message to emit at INFO level
    #
    def InfLogger (msg):
        EdkLogger.info(msg)
\r
    ## ErrorLogger()
    #
    #   Report a GenFds error through the common EdkLogger with the
    #   GENFDS_ERROR error code.
    #
    #   @param  msg        Error message text
    #   @param  File       Optional file name associated with the error
    #   @param  Line       Optional line number associated with the error
    #   @param  ExtraData  Optional additional detail string
    #
    def ErrorLogger (msg, File=None, Line=None, ExtraData=None):
        EdkLogger.error('GenFds', GENFDS_ERROR, msg, File, Line, ExtraData)
\r
def DebugLogger (Level, msg):\r
# @param Str String that may contain macro\r
# @param MacroDict Dictionary that contains macro value pair\r
#\r
- def MacroExtend (Str, MacroDict = {}, Arch = 'COMMON'):\r
+ def MacroExtend (Str, MacroDict={}, Arch='COMMON'):\r
if Str == None :\r
return None\r
\r
\r
PcdValue = PcdObj.DefaultValue\r
return PcdValue\r
- \r
- for Package in GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform, \r
- Arch, \r
- GenFdsGlobalVariable.TargetName, \r
+\r
+ for Package in GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform,\r
+ Arch,\r
+ GenFdsGlobalVariable.TargetName,\r
GenFdsGlobalVariable.ToolChainTag):\r
PcdDict = Package.Pcds\r
for Key in PcdDict:\r
# @param Dict dictionary contains macro and its value\r
# @retval tuple (Generated file name, section alignment)\r
#\r
- def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}):\r
+ def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict={}):\r
#\r
# Generate all section\r
#\r
self.SectionType = FfsInf.__ExtendMacro__(self.SectionType)\r
self.CurrentArchList = [FfsInf.CurrentArch]\r
\r
- SectFile = tuple()\r
+ SectFile = tuple()\r
SectAlign = []\r
Index = 0\r
MaxAlign = None\r
\r
for Sect in self.SectionList:\r
Index = Index + 1\r
- SecIndex = '%s.%d' %(SecNum,Index)\r
+ SecIndex = '%s.%d' % (SecNum, Index)\r
# set base address for inside FvImage\r
if isinstance(Sect, FvImageSection):\r
if self.FvAddr != []:\r
elif isinstance(Sect, GuidSection):\r
Sect.FvAddr = self.FvAddr\r
Sect.FvParentAddr = self.FvParentAddr\r
- ReturnSectList, align = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList,FfsInf, Dict)\r
+ ReturnSectList, align = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList, FfsInf, Dict)\r
if isinstance(Sect, GuidSection):\r
if Sect.IncludeFvSection:\r
self.IncludeFvSection = Sect.IncludeFvSection\r
self.Alignment = MaxAlign\r
\r
OutputFile = OutputPath + \\r
- os.sep + \\r
+ os.sep + \\r
ModuleName + \\r
- 'SEC' + \\r
- SecNum + \\r
+ 'SEC' + \\r
+ SecNum + \\r
Ffs.SectionSuffix['GUIDED']\r
OutputFile = os.path.normpath(OutputFile)\r
\r
# GENCRC32 section\r
#\r
if self.NameGuid == None :\r
- GenFdsGlobalVariable.VerboseLogger( "Use GenSection function Generate CRC32 Section")\r
+ GenFdsGlobalVariable.VerboseLogger("Use GenSection function Generate CRC32 Section")\r
GenFdsGlobalVariable.GenerateSection(OutputFile, SectFile, Section.Section.SectionType[self.SectionType], InputAlign=SectAlign)\r
OutputFileList = []\r
OutputFileList.append(OutputFile)\r
elif ExternalTool == None:\r
EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % self.NameGuid)\r
else:\r
- DummyFile = OutputFile+".dummy"\r
+ DummyFile = OutputFile + ".dummy"\r
#\r
# Call GenSection with DUMMY section type.\r
#\r
# Use external tool process the Output\r
#\r
TempFile = OutputPath + \\r
- os.sep + \\r
+ os.sep + \\r
ModuleName + \\r
- 'SEC' + \\r
- SecNum + \\r
+ 'SEC' + \\r
+ SecNum + \\r
'.tmp'\r
TempFile = os.path.normpath(TempFile)\r
#\r
if not os.path.exists(TempFile):\r
EdkLogger.error("GenFds", COMMAND_FAILURE, 'Fail to call %s, no output file was generated' % ExternalTool)\r
\r
- FileHandleIn = open(DummyFile,'rb')\r
- FileHandleIn.seek(0,2)\r
+ FileHandleIn = open(DummyFile, 'rb')\r
+ FileHandleIn.seek(0, 2)\r
InputFileSize = FileHandleIn.tell()\r
- \r
- FileHandleOut = open(TempFile,'rb')\r
- FileHandleOut.seek(0,2)\r
+\r
+ FileHandleOut = open(TempFile, 'rb')\r
+ FileHandleOut.seek(0, 2)\r
TempFileSize = FileHandleOut.tell()\r
\r
Attribute = []\r
if self.ProcessRequired == "NONE" and HeaderLength == None:\r
if TempFileSize > InputFileSize:\r
FileHandleIn.seek(0)\r
- BufferIn = FileHandleIn.read()\r
+ BufferIn = FileHandleIn.read()\r
FileHandleOut.seek(0)\r
BufferOut = FileHandleOut.read()\r
if BufferIn == BufferOut[TempFileSize - InputFileSize:]:\r
\r
FileHandleIn.close()\r
FileHandleOut.close()\r
- \r
+\r
if FirstCall and 'PROCESSING_REQUIRED' in Attribute:\r
# Guided data by -z option on first call is the process required data. Call the guided tool with the real option.\r
GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile], ExternalTool, CmdOption)\r
- \r
+\r
#\r
# Call Gensection Add Section Header\r
#\r
if self.ProcessRequired in ("TRUE", "1"):\r
if 'PROCESSING_REQUIRED' not in Attribute:\r
Attribute.append('PROCESSING_REQUIRED')\r
- \r
+\r
if self.AuthStatusValid in ("TRUE", "1"):\r
Attribute.append('AUTH_STATUS_VALID')\r
GenFdsGlobalVariable.GenerateSection(OutputFile, [TempFile], Section.Section.SectionType['GUIDED'],\r
ToolDb = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.ConfDir).ToolsDefTxtDatabase\r
if ToolChain not in ToolDb['TOOL_CHAIN_TAG']:\r
EdkLogger.error("GenFds", GENFDS_ERROR, "Can not find external tool because tool tag %s is not defined in tools_def.txt!" % ToolChain)\r
- self.KeyStringList = [Target+'_'+ToolChain+'_'+self.CurrentArchList[0]]\r
+ self.KeyStringList = [Target + '_' + ToolChain + '_' + self.CurrentArchList[0]]\r
for Arch in self.CurrentArchList:\r
if Target + '_' + ToolChain + '_' + Arch not in self.KeyStringList:\r
self.KeyStringList.append(Target + '_' + ToolChain + '_' + Arch)\r
if self.NameGuid == ToolDef[1]:\r
KeyList = ToolDef[0].split('_')\r
Key = KeyList[0] + \\r
- '_' + \\r
+ '_' + \\r
KeyList[1] + \\r
- '_' + \\r
+ '_' + \\r
KeyList[2]\r
if Key in self.KeyStringList and KeyList[4] == 'GUID':\r
\r
- ToolPath = ToolDefinition.get( Key + \\r
- '_' + \\r
+ ToolPath = ToolDefinition.get(Key + \\r
+ '_' + \\r
KeyList[3] + \\r
- '_' + \\r
+ '_' + \\r
'PATH')\r
\r
- ToolOption = ToolDefinition.get( Key + \\r
- '_' + \\r
+ ToolOption = ToolDefinition.get(Key + \\r
+ '_' + \\r
KeyList[3] + \\r
- '_' + \\r
+ '_' + \\r
'FLAGS')\r
if ToolPathTmp == None:\r
ToolPathTmp = ToolPath\r
else:\r
if ToolPathTmp != ToolPath:\r
EdkLogger.error("GenFds", GENFDS_ERROR, "Don't know which tool to use, %s or %s ?" % (ToolPathTmp, ToolPath))\r
- \r
- \r
+\r
+\r
return ToolPathTmp, ToolOption\r
\r
\r
# @retval string Generated FV file path\r
#\r
\r
- def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, ImageBinDict, vtfDict = None, MacroDict = {}):\r
+ def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, ImageBinDict, vtfDict=None, MacroDict={}):\r
Size = self.Size\r
GenFdsGlobalVariable.InfLogger('\nGenerate Region at Offset 0x%X' % self.Offset)\r
- GenFdsGlobalVariable.InfLogger(" Region Size = 0x%X" %Size)\r
+ GenFdsGlobalVariable.InfLogger(" Region Size = 0x%X" % Size)\r
GenFdsGlobalVariable.SharpCounter = 0\r
\r
if self.RegionType == 'FV':\r
# Get Fv from FvDict\r
#\r
self.FvAddress = int(BaseAddress, 16) + self.Offset\r
- FvBaseAddress = '0x%X' %self.FvAddress\r
- FvOffset = 0\r
+ FvBaseAddress = '0x%X' % self.FvAddress\r
+ FvOffset = 0\r
for RegionData in self.RegionDataList:\r
FileName = None\r
if RegionData.endswith(".fv"):\r
RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)\r
- GenFdsGlobalVariable.InfLogger(' Region FV File Name = .fv : %s'%RegionData)\r
+ GenFdsGlobalVariable.InfLogger(' Region FV File Name = .fv : %s' % RegionData)\r
if RegionData[1] != ':' :\r
RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)\r
if not os.path.exists(RegionData):\r
EdkLogger.error("GenFds", GENFDS_ERROR,\r
"FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))\r
FvBuffer = StringIO.StringIO('')\r
- FvBaseAddress = '0x%X' %self.FvAddress\r
+ FvBaseAddress = '0x%X' % self.FvAddress\r
BlockSize = None\r
BlockNum = None\r
FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, vtfDict)\r
EdkLogger.error("GenFds", GENFDS_ERROR,\r
"Size of FV File (%s) is larger than Region Size 0x%X specified." \\r
% (RegionData, Size))\r
- BinFile = open (FileName, 'r+b')\r
+ BinFile = open(FileName, 'r+b')\r
Buffer.write(BinFile.read())\r
BinFile.close()\r
Size = Size - FileLength\r
for RegionData in self.RegionDataList:\r
if RegionData.endswith(".cap"):\r
RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)\r
- GenFdsGlobalVariable.InfLogger(' Region CAPSULE Image Name = .cap : %s'%RegionData)\r
+ GenFdsGlobalVariable.InfLogger(' Region CAPSULE Image Name = .cap : %s' % RegionData)\r
if RegionData[1] != ':' :\r
RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)\r
if not os.path.exists(RegionData):\r
EdkLogger.error("GenFds", GENFDS_ERROR,\r
"Size 0x%X of Capsule File (%s) is larger than Region Size 0x%X specified." \\r
% (FileLength, RegionData, Size))\r
- BinFile = open (FileName, 'r+b')\r
+ BinFile = open(FileName, 'r+b')\r
Buffer.write(BinFile.read())\r
BinFile.close()\r
Size = Size - FileLength\r
EdkLogger.error("GenFds", GENFDS_ERROR,\r
"Size of File (%s) is larger than Region Size 0x%X specified." \\r
% (RegionData, Size))\r
- GenFdsGlobalVariable.InfLogger(' Region File Name = %s'%RegionData)\r
- BinFile = open (RegionData, 'rb')\r
+ GenFdsGlobalVariable.InfLogger(' Region File Name = %s' % RegionData)\r
+ BinFile = open(RegionData, 'rb')\r
Buffer.write(BinFile.read())\r
BinFile.close()\r
Size = Size - FileLength\r
Granu = 1024\r
Str = Str[:-1]\r
elif Str.endswith('M'):\r
- Granu = 1024*1024\r
+ Granu = 1024 * 1024\r
Str = Str[:-1]\r
elif Str.endswith('G'):\r
- Granu = 1024*1024*1024\r
+ Granu = 1024 * 1024 * 1024\r
Str = Str[:-1]\r
else:\r
pass\r
\r
- AlignValue = int(Str)*Granu\r
+ AlignValue = int(Str) * Granu\r
return AlignValue\r
- \r
+\r
## BlockSizeOfRegion()\r
#\r
# @param BlockSizeList List of block information\r
else:\r
# region ended within current blocks\r
if self.Offset + self.Size <= End:\r
- ExpectedList.append((BlockSize, (RemindingSize + BlockSize - 1)/BlockSize))\r
+ ExpectedList.append((BlockSize, (RemindingSize + BlockSize - 1) / BlockSize))\r
break\r
# region not ended yet\r
else:\r
UsedBlockNum = BlockNum\r
# region started in middle of current blocks\r
else:\r
- UsedBlockNum = (End - self.Offset)/BlockSize\r
+ UsedBlockNum = (End - self.Offset) / BlockSize\r
Start = End\r
ExpectedList.append((BlockSize, UsedBlockNum))\r
RemindingSize -= BlockSize * UsedBlockNum\r
- \r
+\r
if FvObj.BlockSizeList == []:\r
FvObj.BlockSizeList = ExpectedList\r
else:\r
Sum += Item[0] * Item[1]\r
if self.Size < Sum:\r
EdkLogger.error("GenFds", GENFDS_ERROR, "Total Size of FV %s 0x%x is larger than Region Size 0x%x "\r
- %(FvObj.UiFvName, Sum, self.Size))\r
+ % (FvObj.UiFvName, Sum, self.Size))\r
# check whether the BlockStatements in FV section is appropriate\r
ExpectedListData = ''\r
for Item in ExpectedList:\r
- ExpectedListData += "BlockSize = 0x%x\n\tNumBlocks = 0x%x\n\t"%Item \r
+ ExpectedListData += "BlockSize = 0x%x\n\tNumBlocks = 0x%x\n\t" % Item\r
Index = 0\r
for Item in FvObj.BlockSizeList:\r
if Item[0] != ExpectedList[Index][0]:\r
EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not align with FD's, suggested FV BlockStatement"\r
- %FvObj.UiFvName, ExtraData = ExpectedListData)\r
+ % FvObj.UiFvName, ExtraData=ExpectedListData)\r
elif Item[1] != ExpectedList[Index][1]:\r
if (Item[1] < ExpectedList[Index][1]) and (Index == len(FvObj.BlockSizeList) - 1):\r
break;\r
else:\r
EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not align with FD's, suggested FV BlockStatement"\r
- %FvObj.UiFvName, ExtraData = ExpectedListData)\r
+ % FvObj.UiFvName, ExtraData=ExpectedListData)\r
else:\r
Index += 1\r
\r
# @param Dict dictionary contains macro and its value\r
# @retval tuple (Generated file name, section alignment)\r
#\r
- def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}):\r
+ def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict={}):\r
#\r
# Prepare the parameter of GenSection\r
#\r
# @param Dict dictionary contains macro and its value\r
# @retval tuple (Generated file name, section alignment)\r
#\r
- def GenSection(self,OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}):\r
+ def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict={}):\r
#\r
# Prepare the parameter of GenSection\r
#\r
def GenBsfInf (self):\r
FvList = self.GetFvList()\r
self.BsfInfName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.inf')\r
- BsfInf = open (self.BsfInfName, 'w+')\r
+ BsfInf = open(self.BsfInfName, 'w+')\r
if self.ResetBin != None:\r
BsfInf.writelines ("[OPTIONS]" + T_CHAR_LF)\r
- BsfInf.writelines ("IA32_RST_BIN" + \\r
- " = " + \\r
+ BsfInf.writelines ("IA32_RST_BIN" + \\r
+ " = " + \\r
GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.ResetBin)) + \\r
- T_CHAR_LF )\r
- BsfInf.writelines (T_CHAR_LF )\r
- \r
+ T_CHAR_LF)\r
+ BsfInf.writelines (T_CHAR_LF)\r
+\r
BsfInf.writelines ("[COMPONENTS]" + T_CHAR_LF)\r
\r
for ComponentObj in self.ComponentStatementList :\r
- BsfInf.writelines ("COMP_NAME" + \\r
- " = " + \\r
+ BsfInf.writelines ("COMP_NAME" + \\r
+ " = " + \\r
ComponentObj.CompName + \\r
- T_CHAR_LF )\r
+ T_CHAR_LF)\r
if ComponentObj.CompLoc.upper() == 'NONE':\r
- BsfInf.writelines ("COMP_LOC" + \\r
- " = " + \\r
- 'N' + \\r
- T_CHAR_LF )\r
- \r
+ BsfInf.writelines ("COMP_LOC" + \\r
+ " = " + \\r
+ 'N' + \\r
+ T_CHAR_LF)\r
+\r
elif ComponentObj.FilePos != None:\r
- BsfInf.writelines ("COMP_LOC" + \\r
- " = " + \\r
+ BsfInf.writelines ("COMP_LOC" + \\r
+ " = " + \\r
ComponentObj.FilePos + \\r
- T_CHAR_LF )\r
+ T_CHAR_LF)\r
else:\r
Index = FvList.index(ComponentObj.CompLoc.upper())\r
if Index == 0:\r
- BsfInf.writelines ("COMP_LOC" + \\r
- " = " + \\r
- 'F' + \\r
- T_CHAR_LF )\r
+ BsfInf.writelines ("COMP_LOC" + \\r
+ " = " + \\r
+ 'F' + \\r
+ T_CHAR_LF)\r
elif Index == 1:\r
- BsfInf.writelines ("COMP_LOC" + \\r
- " = " + \\r
- 'S' + \\r
- T_CHAR_LF )\r
- \r
- BsfInf.writelines ("COMP_TYPE" + \\r
- " = " + \\r
+ BsfInf.writelines ("COMP_LOC" + \\r
+ " = " + \\r
+ 'S' + \\r
+ T_CHAR_LF)\r
+\r
+ BsfInf.writelines ("COMP_TYPE" + \\r
+ " = " + \\r
ComponentObj.CompType + \\r
- T_CHAR_LF )\r
- BsfInf.writelines ("COMP_VER" + \\r
- " = " + \\r
+ T_CHAR_LF)\r
+ BsfInf.writelines ("COMP_VER" + \\r
+ " = " + \\r
ComponentObj.CompVer + \\r
- T_CHAR_LF )\r
- BsfInf.writelines ("COMP_CS" + \\r
- " = " + \\r
+ T_CHAR_LF)\r
+ BsfInf.writelines ("COMP_CS" + \\r
+ " = " + \\r
ComponentObj.CompCs + \\r
- T_CHAR_LF )\r
- \r
+ T_CHAR_LF)\r
+\r
BinPath = ComponentObj.CompBin\r
if BinPath != '-':\r
BinPath = GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(BinPath))\r
- BsfInf.writelines ("COMP_BIN" + \\r
- " = " + \\r
+ BsfInf.writelines ("COMP_BIN" + \\r
+ " = " + \\r
BinPath + \\r
- T_CHAR_LF )\r
- \r
+ T_CHAR_LF)\r
+\r
SymPath = ComponentObj.CompSym\r
if SymPath != '-':\r
SymPath = GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(SymPath))\r
- BsfInf.writelines ("COMP_SYM" + \\r
- " = " + \\r
+ BsfInf.writelines ("COMP_SYM" + \\r
+ " = " + \\r
SymPath + \\r
- T_CHAR_LF )\r
- BsfInf.writelines ("COMP_SIZE" + \\r
- " = " + \\r
+ T_CHAR_LF)\r
+ BsfInf.writelines ("COMP_SIZE" + \\r
+ " = " + \\r
ComponentObj.CompSize + \\r
- T_CHAR_LF )\r
- BsfInf.writelines (T_CHAR_LF )\r
- \r
+ T_CHAR_LF)\r
+ BsfInf.writelines (T_CHAR_LF)\r
+\r
BsfInf.close()\r
\r
## GenFvList() method\r
(BaseAddress, Size) = FdAddressDict.get(i)\r
CmdStr += (\r
'-r', '0x%x' % BaseAddress,\r
- '-s', '0x%x' %Size,\r
+ '-s', '0x%x' % Size,\r
)\r
return CmdStr\r
\r
@param lines line array for map file\r
\r
@return a list which element hold (PcdName, Offset, SectionName)\r
- """ \r
+ """\r
status = 0 #0 - beginning of file; 1 - PE section definition; 2 - symbol table\r
- secs = [] # key = section name\r
+ secs = [] # key = section name\r
bPcds = []\r
- \r
+\r
\r
for line in lines:\r
line = line.strip()\r
continue\r
if re.match("^entry point at", line):\r
status = 3\r
- continue \r
+ continue\r
if status == 1 and len(line) != 0:\r
- m = secRe.match(line)\r
+ m = secRe.match(line)\r
assert m != None, "Fail to parse the section in map file , line is %s" % line\r
sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)\r
secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])\r
m = symRe.match(line)\r
assert m != None, "Fail to parse the symbol in map file, line is %s" % line\r
sec_no, sym_offset, sym_name, vir_addr = m.groups(0)\r
- sec_no = int(sec_no, 16)\r
+ sec_no = int(sec_no, 16)\r
sym_offset = int(sym_offset, 16)\r
- vir_addr = int(vir_addr, 16)\r
+ vir_addr = int(vir_addr, 16)\r
m2 = re.match('^[_]+gPcd_BinaryPatch_([\w]+)', sym_name)\r
if m2 != None:\r
# fond a binary pcd entry in map file\r
f.close()\r
\r
#print 'Success to generate Binary Patch PCD table at %s!' % pcdpath \r
- \r
+\r
if __name__ == '__main__':\r
UsageString = "%prog -m <MapFile> -e <EfiFile> -o <OutFile>"\r
AdditionalNotes = "\nPCD table is generated in file name with .BinaryPcdTable.txt postfix"\r
if options.mapfile == None or options.efifile == None:\r
print parser.get_usage()\r
elif os.path.exists(options.mapfile) and os.path.exists(options.efifile):\r
- list = parsePcdInfoFromMapFile(options.mapfile, options.efifile) \r
+ list = parsePcdInfoFromMapFile(options.mapfile, options.efifile)\r
if list != None:\r
if options.outfile != None:\r
generatePcdTable(list, options.outfile)\r
else:\r
- generatePcdTable(list, options.mapfile.replace('.map', '.BinaryPcdTable.txt')) \r
+ generatePcdTable(list, options.mapfile.replace('.map', '.BinaryPcdTable.txt'))\r
else:\r
print 'Fail to generate Patch PCD Table based on map file and efi file'\r
else:\r
#\r
# Length of Binary File\r
#\r
- FileHandle = open (FileName, 'rb')\r
+ FileHandle = open(FileName, 'rb')\r
FileHandle.seek (0, 2)\r
FileLength = FileHandle.tell()\r
FileHandle.close()\r
return OPTION_MISSING, "PcdMaxSize is not specified for VOID* type PCD."\r
ValueLength = int(MaxSize)\r
else:\r
- return PARAMETER_INVALID, "PCD type %s is not valid." %(CommandOptions.PcdTypeName)\r
+ return PARAMETER_INVALID, "PCD type %s is not valid." % (CommandOptions.PcdTypeName)\r
#\r
# Check PcdValue is in the input binary file.\r
#\r
#\r
# Read binary file into array\r
#\r
- FileHandle = open (FileName, 'rb')\r
+ FileHandle = open(FileName, 'rb')\r
ByteArray = array.array('B')\r
ByteArray.fromfile(FileHandle, FileLength)\r
FileHandle.close()\r
if ValueNumber != 0:\r
ValueNumber = 1\r
except:\r
- return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." %(ValueString)\r
+ return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (ValueString)\r
#\r
# Set PCD value into binary data\r
#\r
else:\r
ValueNumber = int (ValueString)\r
except:\r
- return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." %(ValueString)\r
+ return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (ValueString)\r
#\r
# Set PCD value into binary data\r
#\r
if Index >= ValueLength:\r
break\r
except:\r
- return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string array." %(ValueString)\r
+ return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string array." % (ValueString)\r
else:\r
#\r
# Patch ascii string \r
if ByteList != OrigByteList:\r
ByteArray = array.array('B')\r
ByteArray.fromlist(ByteList)\r
- FileHandle = open (FileName, 'wb')\r
+ FileHandle = open(FileName, 'wb')\r
ByteArray.tofile(FileHandle)\r
FileHandle.close()\r
- return 0, "Patch Value into File %s successfully." %(FileName)\r
+ return 0, "Patch Value into File %s successfully." % (FileName)\r
\r
## Parse command line options\r
#\r
EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdOffset or PcdValue of PcdTypeName is not specified.")\r
return 1\r
if CommandOptions.PcdTypeName.upper() not in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64", "VOID*"]:\r
- EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData="PCD type %s is not valid." %(CommandOptions.PcdTypeName))\r
+ EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData="PCD type %s is not valid." % (CommandOptions.PcdTypeName))\r
return 1\r
if CommandOptions.PcdTypeName.upper() == "VOID*" and CommandOptions.PcdMaxSize == None:\r
EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdMaxSize is not specified for VOID* type PCD.")\r
# @param Enabled: If this error enabled\r
# @param Corrected: if this error corrected\r
#\r
- def Insert(self, ErrorID, OtherMsg = '', BelongsToTable = '', BelongsToItem = -1, Enabled = 0, Corrected = -1):\r
+ def Insert(self, ErrorID, OtherMsg='', BelongsToTable='', BelongsToItem= -1, Enabled=0, Corrected= -1):\r
self.ID = self.ID + 1\r
SqlCommand = """insert into %s values(%s, %s, '%s', '%s', %s, %s, %s)""" \\r
% (self.Table, self.ID, ErrorID, ConvertToSqlString2(OtherMsg), BelongsToTable, BelongsToItem, Enabled, Corrected)\r
Table.Insert(self, SqlCommand)\r
- \r
+\r
return self.ID\r
- \r
+\r
## Query table\r
#\r
# @retval: A recordSet of all found records \r
#\r
# @param Filename: To filename to save the report content\r
#\r
- def ToCSV(self, Filename = 'Report.csv'):\r
+ def ToCSV(self, Filename='Report.csv'):\r
try:\r
File = open(Filename, 'w+')\r
File.write("""No, Error Code, Error Message, File, LineNo, Other Error Message\n""")\r
if NewRecord != []:\r
File.write("""%s,%s,"%s",%s,%s,"%s"\n""" % (Index, ErrorID, EccToolError.gEccErrorMessage[ErrorID], NewRecord[0][1], NewRecord[0][0], OtherMsg))\r
EdkLogger.quiet("%s(%s): [%s]%s %s" % (NewRecord[0][1], NewRecord[0][0], ErrorID, EccToolError.gEccErrorMessage[ErrorID], OtherMsg))\r
- \r
+\r
File.close()\r
except IOError:\r
NewFilename = 'Report_' + time.strftime("%Y%m%d_%H%M%S.csv", time.localtime())\r
\r
ValList, Valid, Index = AnalyzeDscPcd(self._ValueList[2], self._ItemType)\r
if not Valid:\r
- EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self._FileWithError, Line=self._LineIndex+1,\r
+ EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self._FileWithError, Line=self._LineIndex + 1,\r
ExtraData="%s.%s|%s" % (self._ValueList[0], self._ValueList[1], self._ValueList[2]))\r
PcdValue = ValList[Index]\r
if PcdValue:\r
LineNo = Record[6]\r
break\r
EdkLogger.error("build", FORMAT_NOT_SUPPORTED,\r
- "MODULE_TYPE %s is not supported for EDK II, valid values are:\n %s" % (self._ModuleType,' '.join(l for l in SUP_MODULE_LIST)), \r
+ "MODULE_TYPE %s is not supported for EDK II, valid values are:\n %s" % (self._ModuleType, ' '.join(l for l in SUP_MODULE_LIST)),\r
File=self.MetaFile, Line=LineNo)\r
if (self._Specification == None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x0001000A):\r
if self._ModuleType == SUP_MODULE_SMM_CORE:\r
- EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.MetaFile) \r
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.MetaFile)\r
if self._Defs and 'PCI_DEVICE_ID' in self._Defs and 'PCI_VENDOR_ID' in self._Defs \\r
and 'PCI_CLASS_CODE' in self._Defs:\r
self._BuildType = 'UEFI_OPTIONROM'\r
self._BuildType = 'UEFI_HII'\r
else:\r
self._BuildType = self._ModuleType.upper()\r
- \r
+\r
if self._DxsFile:\r
File = PathClass(NormPath(self._DxsFile), self._ModuleDir, Arch=self._Arch)\r
# check the file validation\r
if not self._ComponentType:\r
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,\r
"COMPONENT_TYPE is not given", File=self.MetaFile)\r
- self._BuildType = self._ComponentType.upper() \r
+ self._BuildType = self._ComponentType.upper()\r
if self._ComponentType in self._MODULE_TYPE_:\r
self._ModuleType = self._MODULE_TYPE_[self._ComponentType]\r
if self._ComponentType == 'LIBRARY':\r
Macros["EDK_SOURCE"] = GlobalData.gEcpSource\r
Macros['PROCESSOR'] = self._Arch\r
RecordList = self._RawData[MODEL_META_DATA_NMAKE, self._Arch, self._Platform]\r
- for Name,Value,Dummy,Arch,Platform,ID,LineNo in RecordList:\r
+ for Name, Value, Dummy, Arch, Platform, ID, LineNo in RecordList:\r
Value = ReplaceMacro(Value, Macros, True)\r
if Name == "IMAGE_ENTRY_POINT":\r
if self._ModuleEntryPointList == None:\r
'build',\r
FORMAT_INVALID,\r
"No TokenValue for PCD [%s.%s] in [%s]!" % (TokenSpaceGuid, PcdCName, str(Package)),\r
- File =self.MetaFile, Line=LineNo,\r
+ File=self.MetaFile, Line=LineNo,\r
ExtraData=None\r
) \r
#\r
'build',\r
FORMAT_INVALID,\r
"The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid:" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),\r
- File =self.MetaFile, Line=LineNo,\r
+ File=self.MetaFile, Line=LineNo,\r
ExtraData=None\r
)\r
\r
EdkLogger.error(\r
'build',\r
FORMAT_INVALID,\r
- "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, as a decimal it should between: 0 - 4294967295!"% (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),\r
- File =self.MetaFile, Line=LineNo,\r
+ "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, as a decimal it should between: 0 - 4294967295!" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),\r
+ File=self.MetaFile, Line=LineNo,\r
ExtraData=None\r
- ) \r
+ )\r
except:\r
EdkLogger.error(\r
'build',\r
FORMAT_INVALID,\r
- "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, it should be hexadecimal or decimal!"% (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),\r
- File =self.MetaFile, Line=LineNo,\r
+ "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, it should be hexadecimal or decimal!" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),\r
+ File=self.MetaFile, Line=LineNo,\r
ExtraData=None\r
)\r
- \r
+\r
Pcd.DatumType = PcdInPackage.DatumType\r
Pcd.MaxDatumSize = PcdInPackage.MaxDatumSize\r
Pcd.InfDefaultValue = Pcd.DefaultValue\r
'build',\r
FORMAT_INVALID,\r
"PCD [%s.%s] in [%s] is not found in dependent packages:" % (TokenSpaceGuid, PcdCName, self.MetaFile),\r
- File =self.MetaFile, Line=LineNo,\r
+ File=self.MetaFile, Line=LineNo,\r
ExtraData="\t%s" % '\n\t'.join([str(P) for P in self.Packages])\r
)\r
Pcds[PcdCName, TokenSpaceGuid] = Pcd\r
## Summarize all packages in the database\r
def GetPackageList(self, Platform, Arch, TargetName, ToolChainTag):\r
self.Platform = Platform\r
- PackageList =[]\r
+ PackageList = []\r
Pa = self.BuildObject[self.Platform, 'COMMON']\r
#\r
# Get Package related to Modules\r
LibObj = self.BuildObject[Lib, Arch, TargetName, ToolChainTag]\r
for Package in LibObj.Packages:\r
if Package not in PackageList:\r
- PackageList.append(Package) \r
- \r
+ PackageList.append(Package)\r
+\r
return PackageList\r
\r
## Summarize all platforms in the database\r
gEndOfLine = "\r\n"\r
\r
## Tags for section start, end and separator\r
-gSectionStart = ">" + "=" * (gLineMaxLength-2) + "<"\r
-gSectionEnd = "<" + "=" * (gLineMaxLength-2) + ">" + "\n"\r
+gSectionStart = ">" + "=" * (gLineMaxLength - 2) + "<"\r
+gSectionEnd = "<" + "=" * (gLineMaxLength - 2) + ">" + "\n"\r
gSectionSep = "=" * gLineMaxLength\r
\r
## Tags for subsection start, end and separator\r
-gSubSectionStart = ">" + "-" * (gLineMaxLength-2) + "<"\r
-gSubSectionEnd = "<" + "-" * (gLineMaxLength-2) + ">"\r
+gSubSectionStart = ">" + "-" * (gLineMaxLength - 2) + "<"\r
+gSubSectionEnd = "<" + "-" * (gLineMaxLength - 2) + ">"\r
gSubSectionSep = "-" * gLineMaxLength\r
\r
\r
def __init__(self, Wa):\r
self._GuidDb = {}\r
for Pa in Wa.AutoGenObjectList:\r
- for Package in Pa.PackageList: \r
+ for Package in Pa.PackageList:\r
for Protocol in Package.Protocols:\r
GuidValue = GuidStructureStringToGuidString(Package.Protocols[Protocol])\r
self._GuidDb[GuidValue.upper()] = Protocol\r
GuidString = self._GuidDb.get(GuidValue, GuidValue)\r
Statement = "%s %s" % (Statement, GuidString)\r
DepexStatement.append(Statement)\r
- OpCode = DepexFile.read(1) \r
- \r
+ OpCode = DepexFile.read(1)\r
+\r
return DepexStatement\r
\r
##\r
#\r
def __init__(self, M):\r
self.Depex = ""\r
- self._DepexFileName = os.path.join(M.BuildDir, "OUTPUT", M.Module.BaseName + ".depex") \r
+ self._DepexFileName = os.path.join(M.BuildDir, "OUTPUT", M.Module.BaseName + ".depex")\r
ModuleType = M.ModuleType\r
if not ModuleType:\r
ModuleType = gComponentType2ModuleType.get(M.ComponentType, "")\r
# If a module complies to PI 1.1, promote Module type to "SMM_DRIVER"\r
#\r
if ModuleType == "DXE_SMM_DRIVER":\r
- PiSpec = M.Module.Specification.get("PI_SPECIFICATION_VERSION", "0x00010000")\r
+ PiSpec = M.Module.Specification.get("PI_SPECIFICATION_VERSION", "0x00010000")\r
if int(PiSpec, 0) >= 0x0001000A:\r
ModuleType = "SMM_DRIVER"\r
self.DriverType = gDriverTypeMap.get(ModuleType, "0x2 (FREE_FORM)")\r
# Report PCD item according to their override relationship\r
#\r
if DecMatch and InfMatch:\r
- FileWrite(File, ' %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '('+Pcd.DatumType+')', PcdValue.strip()))\r
+ FileWrite(File, ' %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '(' + Pcd.DatumType + ')', PcdValue.strip()))\r
else:\r
if DscMatch:\r
if (Pcd.TokenCName, Key) in self.FdfPcdSet:\r
- FileWrite(File, ' *F %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '('+Pcd.DatumType+')', PcdValue.strip()))\r
+ FileWrite(File, ' *F %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '(' + Pcd.DatumType + ')', PcdValue.strip()))\r
else:\r
- FileWrite(File, ' *P %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '('+Pcd.DatumType+')', PcdValue.strip()))\r
+ FileWrite(File, ' *P %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '(' + Pcd.DatumType + ')', PcdValue.strip()))\r
else:\r
- FileWrite(File, ' *M %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '('+Pcd.DatumType+')', PcdValue.strip()))\r
- \r
+ FileWrite(File, ' *M %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '(' + Pcd.DatumType + ')', PcdValue.strip()))\r
+\r
if TypeName in ('DYNHII', 'DEXHII', 'DYNVPD', 'DEXVPD'):\r
for SkuInfo in Pcd.SkuInfoList.values():\r
if TypeName in ('DYNHII', 'DEXHII'):\r
- FileWrite(File, '%*s: %s: %s' % (self.MaxLen + 4, SkuInfo.VariableGuid, SkuInfo.VariableName, SkuInfo.VariableOffset)) \r
+ FileWrite(File, '%*s: %s: %s' % (self.MaxLen + 4, SkuInfo.VariableGuid, SkuInfo.VariableName, SkuInfo.VariableOffset))\r
else:\r
FileWrite(File, '%*s' % (self.MaxLen + 4, SkuInfo.VpdOffset))\r
\r
\r
# check if the file path exists or not\r
if not os.path.isfile(FileFullPath):\r
- EdkLogger.error("build", FILE_NOT_FOUND, ExtraData="\t%s (Please give file in absolute path or relative to WORKSPACE)" % FileFullPath)\r
+ EdkLogger.error("build", FILE_NOT_FOUND, ExtraData="\t%s (Please give file in absolute path or relative to WORKSPACE)" % FileFullPath)\r
\r
# remove workspace directory from the beginning part of the file path\r
if Workspace[-1] in ["\\", "/"]:\r
# Update Image to new BaseAddress by GenFw tool\r
#\r
LaunchCommand(["GenFw", "--rebase", str(BaseAddress), "-r", ModuleOutputImage], ModuleInfo.OutputDir)\r
- LaunchCommand(["GenFw", "--rebase", str(BaseAddress), "-r", ModuleDebugImage], ModuleInfo.DebugDir)\r
+ LaunchCommand(["GenFw", "--rebase", str(BaseAddress), "-r", ModuleDebugImage], ModuleInfo.DebugDir)\r
else:\r
#\r
# Set new address to the section header only for SMM driver.\r
#\r
LaunchCommand(["GenFw", "--address", str(BaseAddress), "-r", ModuleOutputImage], ModuleInfo.OutputDir)\r
- LaunchCommand(["GenFw", "--address", str(BaseAddress), "-r", ModuleDebugImage], ModuleInfo.DebugDir)\r
+ LaunchCommand(["GenFw", "--address", str(BaseAddress), "-r", ModuleDebugImage], ModuleInfo.DebugDir)\r
#\r
# Collect funtion address from Map file\r
#\r
FunctionList = []\r
if os.path.exists(ImageMapTable):\r
OrigImageBaseAddress = 0\r
- ImageMap = open (ImageMapTable, 'r')\r
+ ImageMap = open(ImageMapTable, 'r')\r
for LinStr in ImageMap:\r
if len (LinStr.strip()) == 0:\r
continue\r
\r
StrList = LinStr.split()\r
if len (StrList) > 4:\r
- if StrList[3] == 'f' or StrList[3] =='F':\r
+ if StrList[3] == 'f' or StrList[3] == 'F':\r
Name = StrList[1]\r
RelativeAddress = int (StrList[2], 16) - OrigImageBaseAddress\r
FunctionList.append ((Name, RelativeAddress))\r
if not ImageClass.IsValid:\r
EdkLogger.error("build", FILE_PARSE_FAILURE, ExtraData=ImageClass.ErrorInfo)\r
ImageInfo = PeImageInfo(Module.Name, Module.Guid, Module.Arch, Module.OutputDir, Module.DebugDir, ImageClass)\r
- if Module.ModuleType in ['PEI_CORE', 'PEIM', 'COMBINED_PEIM_DRIVER','PIC_PEIM', 'RELOCATABLE_PEIM', 'DXE_CORE']:\r
+ if Module.ModuleType in ['PEI_CORE', 'PEIM', 'COMBINED_PEIM_DRIVER', 'PIC_PEIM', 'RELOCATABLE_PEIM', 'DXE_CORE']:\r
PeiModuleList[Module.MetaFile] = ImageInfo\r
PeiSize += ImageInfo.Image.Size\r
elif Module.ModuleType in ['BS_DRIVER', 'DXE_DRIVER', 'UEFI_DRIVER']:\r
for PcdInfo in PcdTable:\r
ReturnValue = 0\r
if PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE:\r
- ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE, str (PeiSize/0x1000))\r
+ ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE, str (PeiSize / 0x1000))\r
elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE:\r
- ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE, str (BtSize/0x1000))\r
+ ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE, str (BtSize / 0x1000))\r
elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE:\r
- ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE, str (RtSize/0x1000))\r
+ ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE, str (RtSize / 0x1000))\r
elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE and len (SmmModuleList) > 0:\r
- ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE, str (SmmSize/0x1000))\r
+ ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE, str (SmmSize / 0x1000))\r
if ReturnValue != 0:\r
EdkLogger.error("build", PARAMETER_INVALID, "Patch PCD value failed", ExtraData=ErrorInfo)\r
\r
- MapBuffer.write('PEI_CODE_PAGE_NUMBER = 0x%x\n' % (PeiSize/0x1000))\r
- MapBuffer.write('BOOT_CODE_PAGE_NUMBER = 0x%x\n' % (BtSize/0x1000))\r
- MapBuffer.write('RUNTIME_CODE_PAGE_NUMBER = 0x%x\n' % (RtSize/0x1000))\r
+ MapBuffer.write('PEI_CODE_PAGE_NUMBER = 0x%x\n' % (PeiSize / 0x1000))\r
+ MapBuffer.write('BOOT_CODE_PAGE_NUMBER = 0x%x\n' % (BtSize / 0x1000))\r
+ MapBuffer.write('RUNTIME_CODE_PAGE_NUMBER = 0x%x\n' % (RtSize / 0x1000))\r
if len (SmmModuleList) > 0:\r
- MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize/0x1000))\r
+ MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize / 0x1000))\r
\r
PeiBaseAddr = TopMemoryAddress - RtSize - BtSize\r
BtBaseAddr = TopMemoryAddress - RtSize\r
self._RebaseModule (MapBuffer, PeiBaseAddr, PeiModuleList, TopMemoryAddress == 0)\r
self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0)\r
self._RebaseModule (MapBuffer, RtBaseAddr, RtModuleList, TopMemoryAddress == 0)\r
- self._RebaseModule (MapBuffer, 0x1000, SmmModuleList, AddrIsOffset = False, ModeIsSmm = True)\r
+ self._RebaseModule (MapBuffer, 0x1000, SmmModuleList, AddrIsOffset=False, ModeIsSmm=True)\r
MapBuffer.write('\n\n')\r
sys.stdout.write ("\n")\r
sys.stdout.flush()\r
SaveFileOnChange(MapFilePath, MapBuffer.getvalue(), False)\r
MapBuffer.close()\r
if self.LoadFixAddress != 0:\r
- sys.stdout.write ("\nLoad Module At Fix Address Map file can be found at %s\n" %(MapFilePath))\r
+ sys.stdout.write ("\nLoad Module At Fix Address Map file can be found at %s\n" % (MapFilePath))\r
sys.stdout.flush()\r
\r
## Build active platform for different build targets and different tool chains\r
BUILD_ERROR,\r
"Module for [%s] is not a component of active platform."\\r
" Please make sure that the ARCH and inf file path are"\\r
- " given in the same as in [%s]" %\\r
+ " given in the same as in [%s]" % \\r
(', '.join(Wa.ArchList), self.PlatformFile),\r
ExtraData=self.ModuleFile\r
)\r
# @retval Args Target of build command\r
#\r
def MyOptionParser():\r
- Parser = OptionParser(description=__copyright__,version=__version__,prog="build.exe",usage="%prog [options] [all|fds|genc|genmake|clean|cleanall|cleanlib|modules|libraries|run]")\r
- Parser.add_option("-a", "--arch", action="append", type="choice", choices=['IA32','X64','IPF','EBC','ARM', 'AARCH64'], dest="TargetArch",\r
+ Parser = OptionParser(description=__copyright__, version=__version__, prog="build.exe", usage="%prog [options] [all|fds|genc|genmake|clean|cleanall|cleanlib|modules|libraries|run]")\r
+ Parser.add_option("-a", "--arch", action="append", type="choice", choices=['IA32', 'X64', 'IPF', 'EBC', 'ARM', 'AARCH64'], dest="TargetArch",\r
help="ARCHS is one of list: IA32, X64, IPF, ARM, AARCH64 or EBC, which overrides target.txt's TARGET_ARCH definition. To specify more archs, please repeat this option.")\r
Parser.add_option("-p", "--platform", action="callback", type="string", dest="PlatformFile", callback=SingleCheckCallback,\r
help="Build the platform specified by the DSC file name argument, overriding target.txt's ACTIVE_PLATFORM definition.")\r
Parser.add_option("-D", "--define", action="append", type="string", dest="Macros", help="Macro: \"Name [= Value]\".")\r
\r
Parser.add_option("-y", "--report-file", action="store", dest="ReportFile", help="Create/overwrite the report to the specified filename.")\r
- Parser.add_option("-Y", "--report-type", action="append", type="choice", choices=['PCD','LIBRARY','FLASH','DEPEX','BUILD_FLAGS','FIXED_ADDRESS', 'EXECUTION_ORDER'], dest="ReportType", default=[],\r
+ Parser.add_option("-Y", "--report-type", action="append", type="choice", choices=['PCD', 'LIBRARY', 'FLASH', 'DEPEX', 'BUILD_FLAGS', 'FIXED_ADDRESS', 'EXECUTION_ORDER'], dest="ReportType", default=[],\r
help="Flags that control the type of build report to generate. Must be one of: [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS, EXECUTION_ORDER]. "\\r
"To specify more than one flag, repeat this option on the command line and the default flag set is [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS]")\r
Parser.add_option("-F", "--flag", action="store", type="string", dest="Flag",\r
Parser.add_option("--check-usage", action="store_true", dest="CheckUsage", default=False, help="Check usage content of entries listed in INF file.")\r
Parser.add_option("--ignore-sources", action="store_true", dest="IgnoreSources", default=False, help="Focus to a binary build and ignore all source files")\r
\r
- (Opt, Args)=Parser.parse_args()\r
+ (Opt, Args) = Parser.parse_args()\r
return (Opt, Args)\r
\r
## Tool entrance method\r
Target = "all"\r
elif len(Target) >= 2:\r
EdkLogger.error("build", OPTION_NOT_SUPPORTED, "More than one targets are not supported.",\r
- ExtraData="Please select one of: %s" %(' '.join(gSupportedTarget)))\r
+ ExtraData="Please select one of: %s" % (' '.join(gSupportedTarget)))\r
else:\r
Target = Target[0].lower()\r
\r
if Target not in gSupportedTarget:\r
EdkLogger.error("build", OPTION_NOT_SUPPORTED, "Not supported target [%s]." % Target,\r
- ExtraData="Please select one of: %s" %(' '.join(gSupportedTarget)))\r
+ ExtraData="Please select one of: %s" % (' '.join(gSupportedTarget)))\r
\r
#\r
# Check environment variable: EDK_TOOLS_PATH, WORKSPACE, PATH\r
if Option != None and Option.debug != None:\r
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())\r
else:\r
- EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError = False)\r
+ EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)\r
ReturnCode = FORMAT_INVALID\r
except KeyboardInterrupt:\r
ReturnCode = ABORT_ERROR\r
BuildDuration = time.gmtime(int(round(FinishTime - StartTime)))\r
BuildDurationStr = ""\r
if BuildDuration.tm_yday > 1:\r
- BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration) + ", %d day(s)"%(BuildDuration.tm_yday - 1)\r
+ BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration) + ", %d day(s)" % (BuildDuration.tm_yday - 1)\r
else:\r
BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration)\r
if MyBuild != None:\r