X-Git-Url: https://git.proxmox.com/?p=mirror_edk2.git;a=blobdiff_plain;f=BaseTools%2FSource%2FPython%2FCommon%2FMisc.py;h=f44000829aacfe5dacc7090d6a1fc1506946ee59;hp=01297cd0a916f0e50fad79fc405150c38c0cf916;hb=e4ff28c3ac72f946686e715bc32490b35c08ad5b;hpb=97fa0ee9b1cffbb4b97ee35365afa7afcf50e174 diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py index 01297cd0a9..f44000829a 100644 --- a/BaseTools/Source/Python/Common/Misc.py +++ b/BaseTools/Source/Python/Common/Misc.py @@ -1,7 +1,7 @@ ## @file # Common routines used by all tools # -# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.
+# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.
# This program and the accompanying materials # are licensed and made available under the terms and conditions of the BSD License # which accompanies this distribution. The full text of the license may be found at @@ -24,6 +24,7 @@ import re import cPickle import array import shutil +from struct import pack from UserDict import IterableUserDict from UserList import UserList @@ -34,9 +35,11 @@ from BuildToolError import * from CommonDataClass.DataClass import * from Parsing import GetSplitValueList from Common.LongFilePathSupport import OpenLongFilePath as open +from Common.MultipleWorkspace import MultipleWorkspace as mws +import uuid ## Regular expression used to find out place holders in string template -gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE|re.UNICODE) +gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE | re.UNICODE) ## Dictionary used to store file time stamp for quick re-access gFileTimeStampCache = {} # {file path : file time stamp} @@ -44,6 +47,161 @@ gFileTimeStampCache = {} # {file path : file time stamp} ## Dictionary used to store dependencies of files gDependencyDatabase = {} # arch : {file path : [dependent files list]} +def GetVariableOffset(mapfilepath, efifilepath, varnames): + """ Parse map file to get variable offset in current EFI file + @param mapfilepath Map file absolution path + @param efifilepath: EFI binary file full path + @param varnames iteratable container whose elements are variable names to be searched + + @return List whos elements are tuple with variable name and raw offset + """ + lines = [] + try: + f = open(mapfilepath, 'r') + lines = f.readlines() + f.close() + except: + return None + + if len(lines) == 0: return None + firstline = lines[0].strip() + if (firstline.startswith("Archive member included ") and + firstline.endswith(" file (symbol)")): + return _parseForGCC(lines, efifilepath, varnames) + if firstline.startswith("# Path:"): + return _parseForXcode(lines, efifilepath, varnames) + return _parseGeneral(lines, efifilepath, varnames) + +def _parseForXcode(lines, efifilepath, varnames): + status = 0 + ret = [] + for index, line in enumerate(lines): + line = line.strip() + if status == 0 and line == "# Symbols:": + status = 1 + continue + if status == 1 and len(line) != 0: + for varname in varnames: + if varname in line: + m = re.match('^([\da-fA-FxX]+)([\s\S]*)([_]*%s)$' % varname, line) + if m != None: + ret.append((varname, m.group(1))) + return ret + +def _parseForGCC(lines, efifilepath, varnames): + """ Parse map file generated by GCC linker """ + status = 0 + sections = [] + varoffset = [] + for index, line in enumerate(lines): + line = line.strip() + # status machine transection + if status == 0 and line == "Memory Configuration": + status = 1 + continue + elif status == 1 and line == 'Linker script and memory map': + status = 2 + continue + elif status ==2 and line == 'START GROUP': + status = 3 + continue + + # status handler + if status == 3: + m = re.match('^([\w_\.]+) +([\da-fA-Fx]+) +([\da-fA-Fx]+)$', line) + if m != None: + sections.append(m.groups(0)) + for varname in varnames: + Str = '' + m = re.match("^.data.(%s)" % varname, line) + if m != None: + m = re.match(".data.(%s)$" % varname, line) + if m != None: + Str = lines[index + 1] + else: + Str = line[len(".data.%s" % varname):] + if Str: + m = re.match('^([\da-fA-Fx]+) +([\da-fA-Fx]+)', Str.strip()) + if m != None: + varoffset.append((varname, int(m.groups(0)[0], 16) , int(sections[-1][1], 16), sections[-1][0])) + + if not varoffset: + 
return [] + # get section information from efi file + efisecs = PeImageClass(efifilepath).SectionHeaderList + if efisecs == None or len(efisecs) == 0: + return [] + #redirection + redirection = 0 + for efisec in efisecs: + for section in sections: + if section[0].strip() == efisec[0].strip() and section[0].strip() == '.text': + redirection = int(section[1], 16) - efisec[1] + + ret = [] + for var in varoffset: + for efisec in efisecs: + if var[1] >= efisec[1] and var[1] < efisec[1]+efisec[3]: + ret.append((var[0], hex(efisec[2] + var[1] - efisec[1] - redirection))) + return ret + +def _parseGeneral(lines, efifilepath, varnames): + status = 0 #0 - beginning of file; 1 - PE section definition; 2 - symbol table + secs = [] # key = section name + varoffset = [] + secRe = re.compile('^([\da-fA-F]+):([\da-fA-F]+) +([\da-fA-F]+)[Hh]? +([.\w\$]+) +(\w+)', re.UNICODE) + symRe = re.compile('^([\da-fA-F]+):([\da-fA-F]+) +([\.:\\\\\w\?@\$]+) +([\da-fA-F]+)', re.UNICODE) + + for line in lines: + line = line.strip() + if re.match("^Start[' ']+Length[' ']+Name[' ']+Class", line): + status = 1 + continue + if re.match("^Address[' ']+Publics by Value[' ']+Rva\+Base", line): + status = 2 + continue + if re.match("^entry point at", line): + status = 3 + continue + if status == 1 and len(line) != 0: + m = secRe.match(line) + assert m != None, "Fail to parse the section in map file , line is %s" % line + sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0) + secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class]) + if status == 2 and len(line) != 0: + for varname in varnames: + m = symRe.match(line) + assert m != None, "Fail to parse the symbol in map file, line is %s" % line + sec_no, sym_offset, sym_name, vir_addr = m.groups(0) + sec_no = int(sec_no, 16) + sym_offset = int(sym_offset, 16) + vir_addr = int(vir_addr, 16) + m2 = re.match('^[_]*(%s)' % varname, sym_name) + if m2 != None: + # fond a binary pcd entry in map file + for sec in secs: + if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]): + varoffset.append([varname, sec[3], sym_offset, vir_addr, sec_no]) + + if not varoffset: return [] + + # get section information from efi file + efisecs = PeImageClass(efifilepath).SectionHeaderList + if efisecs == None or len(efisecs) == 0: + return [] + + ret = [] + for var in varoffset: + index = 0 + for efisec in efisecs: + index = index + 1 + if var[1].strip() == efisec[0].strip(): + ret.append((var[0], hex(efisec[2] + var[2]))) + elif var[4] == index: + ret.append((var[0], hex(efisec[2] + var[2]))) + + return ret + ## Routine to process duplicated INF # # This function is called by following two cases: @@ -163,11 +321,11 @@ def ProcessVariableArgument(Option, OptionString, Value, Parser): def GuidStringToGuidStructureString(Guid): GuidList = Guid.split('-') Result = '{' - for Index in range(0,3,1): + for Index in range(0, 3, 1): Result = Result + '0x' + GuidList[Index] + ', ' Result = Result + '{0x' + GuidList[3][0:2] + ', 0x' + GuidList[3][2:4] - for Index in range(0,12,2): - Result = Result + ', 0x' + GuidList[4][Index:Index+2] + for Index in range(0, 12, 2): + Result = Result + ', 0x' + GuidList[4][Index:Index + 2] Result += '}}' return Result @@ -364,7 +522,7 @@ def SaveFileOnChange(File, Content, IsBinaryFile=True): Fd.write(Content) Fd.close() except IOError, X: - EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s'%X) + EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X) return True @@ -483,7 
+641,7 @@ class DirCache: # # @retval A list of all files # -def GetFiles(Root, SkipList=None, FullPath = True): +def GetFiles(Root, SkipList=None, FullPath=True): OriPath = Root FileList = [] for Root, Dirs, Files in os.walk(Root): @@ -526,23 +684,26 @@ def RealPath(File, Dir='', OverrideDir=''): return NewFile def RealPath2(File, Dir='', OverrideDir=''): + NewFile = None if OverrideDir: NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(OverrideDir, File))] if NewFile: if OverrideDir[-1] == os.path.sep: return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)] else: - return NewFile[len(OverrideDir)+1:], NewFile[0:len(OverrideDir)] + return NewFile[len(OverrideDir) + 1:], NewFile[0:len(OverrideDir)] if GlobalData.gAllFiles: NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))] - else: + if not NewFile: NewFile = os.path.normpath(os.path.join(Dir, File)) + if not os.path.exists(NewFile): + return None, None if NewFile: if Dir: if Dir[-1] == os.path.sep: return NewFile[len(Dir):], NewFile[0:len(Dir)] else: - return NewFile[len(Dir)+1:], NewFile[0:len(Dir)] + return NewFile[len(Dir) + 1:], NewFile[0:len(Dir)] else: return NewFile, '' @@ -568,7 +729,7 @@ def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource=' # Replace the default dir to current dir if Dir == '.': Dir = os.getcwd() - Dir = Dir[len(Workspace)+1:] + Dir = Dir[len(Workspace) + 1:] # First check if File has Edk definition itself if File.find('$(EFI_SOURCE)') > -1 or File.find('$(EDK_SOURCE)') > -1: @@ -607,7 +768,7 @@ def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.' # Dir is current module dir related to workspace if Dir == '.': Dir = os.getcwd() - Dir = Dir[len(Workspace)+1:] + Dir = Dir[len(Workspace) + 1:] NewFile = File RelaPath = AllFiles[os.path.normpath(Dir)] @@ -661,13 +822,18 @@ def GetRelPath(Path1, Path2): # # @param CName The CName of the GUID # @param PackageList List of packages looking-up in +# @param Inffile The driver file # # @retval GuidValue if the CName is found in any given package # @retval None if the CName is not found in all given packages # -def GuidValue(CName, PackageList): +def GuidValue(CName, PackageList, Inffile = None): for P in PackageList: - if CName in P.Guids: + GuidKeys = P.Guids.keys() + if Inffile and P._PrivateGuids: + if not Inffile.startswith(P.MetaFile.Dir): + GuidKeys = (dict.fromkeys(x for x in P.Guids if x not in P._PrivateGuids)).keys() + if CName in GuidKeys: return P.Guids[CName] return None @@ -675,13 +841,18 @@ def GuidValue(CName, PackageList): # # @param CName The CName of the GUID # @param PackageList List of packages looking-up in +# @param Inffile The driver file # # @retval GuidValue if the CName is found in any given package # @retval None if the CName is not found in all given packages # -def ProtocolValue(CName, PackageList): +def ProtocolValue(CName, PackageList, Inffile = None): for P in PackageList: - if CName in P.Protocols: + ProtocolKeys = P.Protocols.keys() + if Inffile and P._PrivateProtocols: + if not Inffile.startswith(P.MetaFile.Dir): + ProtocolKeys = (dict.fromkeys(x for x in P.Protocols if x not in P._PrivateProtocols)).keys() + if CName in ProtocolKeys: return P.Protocols[CName] return None @@ -689,13 +860,18 @@ def ProtocolValue(CName, PackageList): # # @param CName The CName of the GUID # @param PackageList List of packages looking-up in +# @param Inffile The driver file # # @retval GuidValue if the CName is found in any given package # @retval None if the CName 
is not found in all given packages # -def PpiValue(CName, PackageList): +def PpiValue(CName, PackageList, Inffile = None): for P in PackageList: - if CName in P.Ppis: + PpiKeys = P.Ppis.keys() + if Inffile and P._PrivatePpis: + if not Inffile.startswith(P.MetaFile.Dir): + PpiKeys = (dict.fromkeys(x for x in P.Ppis if x not in P._PrivatePpis)).keys() + if CName in PpiKeys: return P.Ppis[CName] return None @@ -732,7 +908,7 @@ class TemplateString(object): # # PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint # - for PlaceHolder,Start,End in PlaceHolderList: + for PlaceHolder, Start, End in PlaceHolderList: self._SubSectionList.append(TemplateSection[SubSectionStart:Start]) self._SubSectionList.append(TemplateSection[Start:End]) self._PlaceHolderList.append(PlaceHolder) @@ -1118,11 +1294,11 @@ class tdict: if len(key) > 1: RestKeys = key[1:] elif self._Level_ > 1: - RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] + RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)] else: FirstKey = key if self._Level_ > 1: - RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] + RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)] if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList: FirstKey = self._Wildcard @@ -1195,11 +1371,11 @@ class tdict: if len(key) > 1: RestKeys = key[1:] else: - RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] + RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)] else: FirstKey = key if self._Level_ > 1: - RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] + RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)] if FirstKey in self._ValidWildcardList: FirstKey = self._Wildcard @@ -1264,32 +1440,7 @@ def ParseConsoleLog(Filename): Opr.close() Opw.close() -## AnalyzeDscPcd -# -# Analyze DSC PCD value, since there is no data type info in DSC -# This fuction is used to match functions (AnalyzePcdData, AnalyzeHiiPcdData, AnalyzeVpdPcdData) used for retrieving PCD value from database -# 1. Feature flag: TokenSpace.PcdCName|PcdValue -# 2. Fix and Patch:TokenSpace.PcdCName|PcdValue[|MaxSize] -# 3. Dynamic default: -# TokenSpace.PcdCName|PcdValue[|VOID*[|MaxSize]] -# TokenSpace.PcdCName|PcdValue -# 4. Dynamic VPD: -# TokenSpace.PcdCName|VpdOffset[|VpdValue] -# TokenSpace.PcdCName|VpdOffset[|MaxSize[|VpdValue]] -# 5. Dynamic HII: -# TokenSpace.PcdCName|HiiString|VaiableGuid|VariableOffset[|HiiValue] -# PCD value needs to be located in such kind of string, and the PCD value might be an expression in which -# there might have "|" operator, also in string value. 
-# -# @param Setting: String contain information described above with "TokenSpace.PcdCName|" stripped -# @param PcdType: PCD type: feature, fixed, dynamic default VPD HII -# @param DataType: The datum type of PCD: VOID*, UNIT, BOOL -# @retval: -# ValueList: A List contain fields described above -# IsValid: True if conforming EBNF, otherwise False -# Index: The index where PcdValue is in ValueList -# -def AnalyzeDscPcd(Setting, PcdType, DataType=''): +def AnalyzePcdExpression(Setting): Setting = Setting.strip() # There might be escaped quote in a string: \", \\\" Data = Setting.replace('\\\\', '//').replace('\\\"', '\\\'') @@ -1304,7 +1455,7 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''): Pair += 1 elif ch == ')' and not InStr: Pair -= 1 - + if (Pair > 0 or InStr) and ch == TAB_VALUE_SPLIT: NewStr += '-' else: @@ -1319,24 +1470,154 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''): FieldList.append(Setting[StartPos:Pos].strip()) StartPos = Pos + 1 + return FieldList + +def ParseFieldValue (Value): + if type(Value) == type(0): + return Value, (Value.bit_length() + 7) / 8 + if type(Value) <> type(''): + raise ValueError + Value = Value.strip() + if Value.startswith('UINT8') and Value.endswith(')'): + Value, Size = ParseFieldValue(Value.split('(', 1)[1][:-1]) + if Size > 1: + raise ValueError + return Value, 1 + if Value.startswith('UINT16') and Value.endswith(')'): + Value, Size = ParseFieldValue(Value.split('(', 1)[1][:-1]) + if Size > 2: + raise ValueError + return Value, 2 + if Value.startswith('UINT32') and Value.endswith(')'): + Value, Size = ParseFieldValue(Value.split('(', 1)[1][:-1]) + if Size > 4: + raise ValueError + return Value, 4 + if Value.startswith('UINT64') and Value.endswith(')'): + Value, Size = ParseFieldValue(Value.split('(', 1)[1][:-1]) + if Size > 8: + raise ValueError + return Value, 8 + if Value.startswith('GUID') and Value.endswith(')'): + Value = Value.split('(', 1)[1][:-1].strip() + if Value[0] == '{' and Value[-1] == '}': + Value = Value[1:-1].strip() + Value = Value.split('{', 1) + Value = [Item.strip()[2:] for Item in (Value[0] + Value[1][:-1]).split(',')] + Value = '-'.join(Value[0:3]) + '-' + ''.join(Value[3:5]) + '-' + ''.join(Value[5:11]) + if Value[0] == '"' and Value[-1] == '"': + Value = Value[1:-1] + Value = "'" + uuid.UUID(Value).get_bytes_le() + "'" + Value, Size = ParseFieldValue(Value) + return Value, 16 + if Value.startswith('L"') and Value.endswith('"'): + # Unicode String + List = list(Value[2:-1]) + List.reverse() + Value = 0 + for Char in List: + Value = (Value << 16) | ord(Char) + return Value, (len(List) + 1) * 2 + if Value.startswith('"') and Value.endswith('"'): + # ASCII String + List = list(Value[1:-1]) + List.reverse() + Value = 0 + for Char in List: + Value = (Value << 8) | ord(Char) + return Value, len(List) + 1 + if Value.startswith("L'") and Value.endswith("'"): + # Unicode Character Constant + List = list(Value[2:-1]) + List.reverse() + Value = 0 + for Char in List: + Value = (Value << 16) | ord(Char) + return Value, len(List) * 2 + if Value.startswith("'") and Value.endswith("'"): + # Character constant + List = list(Value[1:-1]) + List.reverse() + Value = 0 + for Char in List: + Value = (Value << 8) | ord(Char) + return Value, len(List) + if Value.startswith('{') and Value.endswith('}'): + # Byte array + Value = Value[1:-1] + List = [Item.strip() for Item in Value.split(',')] + List.reverse() + Value = 0 + for Item in List: + ItemValue, Size = ParseFieldValue(Item) + if Size > 1: + raise ValueError + Value = (Value << 8) | 
ItemValue + return Value, len(List) + if Value.lower().startswith('0x'): + Value = int(Value, 16) + return Value, (Value.bit_length() + 7) / 8 + if Value[0].isdigit(): + Value = int(Value, 10) + return Value, (Value.bit_length() + 7) / 8 + if Value.lower() == 'true': + return 1, 1 + if Value.lower() == 'false': + return 0, 1 + return Value, 1 + +## AnalyzeDscPcd +# +# Analyze DSC PCD value, since there is no data type info in DSC +# This fuction is used to match functions (AnalyzePcdData, AnalyzeHiiPcdData, AnalyzeVpdPcdData) used for retrieving PCD value from database +# 1. Feature flag: TokenSpace.PcdCName|PcdValue +# 2. Fix and Patch:TokenSpace.PcdCName|PcdValue[|MaxSize] +# 3. Dynamic default: +# TokenSpace.PcdCName|PcdValue[|VOID*[|MaxSize]] +# TokenSpace.PcdCName|PcdValue +# 4. Dynamic VPD: +# TokenSpace.PcdCName|VpdOffset[|VpdValue] +# TokenSpace.PcdCName|VpdOffset[|MaxSize[|VpdValue]] +# 5. Dynamic HII: +# TokenSpace.PcdCName|HiiString|VaiableGuid|VariableOffset[|HiiValue] +# PCD value needs to be located in such kind of string, and the PCD value might be an expression in which +# there might have "|" operator, also in string value. +# +# @param Setting: String contain information described above with "TokenSpace.PcdCName|" stripped +# @param PcdType: PCD type: feature, fixed, dynamic default VPD HII +# @param DataType: The datum type of PCD: VOID*, UNIT, BOOL +# @retval: +# ValueList: A List contain fields described above +# IsValid: True if conforming EBNF, otherwise False +# Index: The index where PcdValue is in ValueList +# +def AnalyzeDscPcd(Setting, PcdType, DataType=''): + FieldList = AnalyzePcdExpression(Setting) + IsValid = True if PcdType in (MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_FEATURE_FLAG): Value = FieldList[0] Size = '' if len(FieldList) > 1: - Type = FieldList[1] - # Fix the PCD type when no DataType input - if Type == 'VOID*': - DataType = 'VOID*' - else: + if FieldList[1].upper().startswith("0X") or FieldList[1].isdigit(): Size = FieldList[1] + else: + DataType = FieldList[1] + if len(FieldList) > 2: Size = FieldList[2] - if DataType == 'VOID*': - IsValid = (len(FieldList) <= 3) - else: + if DataType == "": IsValid = (len(FieldList) <= 1) - return [Value, '', Size], IsValid, 0 + else: + IsValid = (len(FieldList) <= 3) +# Value, Size = ParseFieldValue(Value) + if Size: + try: + int(Size,16) if Size.upper().startswith("0X") else int(Size) + except: + IsValid = False + Size = -1 + return [str(Value), '', str(Size)], IsValid, 0 elif PcdType in (MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT): Value = FieldList[0] Size = Type = '' @@ -1354,11 +1635,18 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''): Size = str(len(Value.split(","))) else: Size = str(len(Value) -2 + 1 ) - if DataType == 'VOID*': - IsValid = (len(FieldList) <= 3) - else: + if DataType == "": IsValid = (len(FieldList) <= 1) - return [Value, Type, Size], IsValid, 0 + else: + IsValid = (len(FieldList) <= 3) + + if Size: + try: + int(Size,16) if Size.upper().startswith("0X") else int(Size) + except: + IsValid = False + Size = -1 + return [Value, Type, str(Size)], IsValid, 0 elif PcdType in (MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_VPD): VpdOffset = FieldList[0] Value = Size = '' @@ -1370,22 +1658,30 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''): Size = FieldList[1] if len(FieldList) > 2: Value = FieldList[2] - if DataType == 'VOID*': - IsValid = (len(FieldList) <= 3) + if DataType == "": + IsValid = (len(FieldList) <= 1) else: - IsValid = (len(FieldList) <= 
2) - return [VpdOffset, Size, Value], IsValid, 2 + IsValid = (len(FieldList) <= 3) + if Size: + try: + int(Size,16) if Size.upper().startswith("0X") else int(Size) + except: + IsValid = False + Size = -1 + return [VpdOffset, str(Size), Value], IsValid, 2 elif PcdType in (MODEL_PCD_DYNAMIC_HII, MODEL_PCD_DYNAMIC_EX_HII): HiiString = FieldList[0] - Guid = Offset = Value = '' + Guid = Offset = Value = Attribute = '' if len(FieldList) > 1: Guid = FieldList[1] if len(FieldList) > 2: Offset = FieldList[2] if len(FieldList) > 3: Value = FieldList[3] - IsValid = (3 <= len(FieldList) <= 4) - return [HiiString, Guid, Offset, Value], IsValid, 3 + if len(FieldList) > 4: + Attribute = FieldList[4] + IsValid = (3 <= len(FieldList) <= 5) + return [HiiString, Guid, Offset, Value, Attribute], IsValid, 3 return [], False, 0 ## AnalyzePcdData @@ -1397,17 +1693,17 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''): # # @retval ValueList: A List contain value, datum type and toke number. # -def AnalyzePcdData(Setting): - ValueList = ['', '', ''] - - ValueRe = re.compile(r'^\s*L?\".*\|.*\"') +def AnalyzePcdData(Setting): + ValueList = ['', '', ''] + + ValueRe = re.compile(r'^\s*L?\".*\|.*\"') PtrValue = ValueRe.findall(Setting) ValueUpdateFlag = False if len(PtrValue) >= 1: Setting = re.sub(ValueRe, '', Setting) - ValueUpdateFlag = True + ValueUpdateFlag = True TokenList = Setting.split(TAB_VALUE_SPLIT) ValueList[0:len(TokenList)] = TokenList @@ -1443,17 +1739,17 @@ def AnalyzeHiiPcdData(Setting): # # @retval ValueList: A List contain VpdOffset, MaxDatumSize and InitialValue. # -def AnalyzeVpdPcdData(Setting): - ValueList = ['', '', ''] - - ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$') +def AnalyzeVpdPcdData(Setting): + ValueList = ['', '', ''] + + ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$') PtrValue = ValueRe.findall(Setting) ValueUpdateFlag = False if len(PtrValue) >= 1: Setting = re.sub(ValueRe, '', Setting) - ValueUpdateFlag = True + ValueUpdateFlag = True TokenList = Setting.split(TAB_VALUE_SPLIT) ValueList[0:len(TokenList)] = TokenList @@ -1469,12 +1765,12 @@ def AnalyzeVpdPcdData(Setting): # def CheckPcdDatum(Type, Value): if Type == "VOID*": - ValueRe = re.compile(r'\s*L?\".*\"\s*$') + ValueRe = re.compile(r'\s*L?\".*\"\s*$') if not (((Value.startswith('L"') or Value.startswith('"')) and Value.endswith('"')) or (Value.startswith('{') and Value.endswith('}')) ): return False, "Invalid value [%s] of type [%s]; must be in the form of {...} for array"\ - ", or \"...\" for string, or L\"...\" for unicode string" % (Value, Type) + ", or \"...\" for string, or L\"...\" for unicode string" % (Value, Type) elif ValueRe.match(Value): # Check the chars in UnicodeString or CString is printable if Value.startswith("L"): @@ -1500,7 +1796,7 @@ def CheckPcdDatum(Type, Value): return False, "Invalid value [%s] of type [%s];"\ " must be a hexadecimal, decimal or octal in C language format." % (Value, Type) else: - return False, "Invalid type [%s]; must be one of VOID*, BOOLEAN, UINT8, UINT16, UINT32, UINT64." % (Type) + return True, "StructurePcd" return True, "" @@ -1527,7 +1823,7 @@ def SplitOption(OptionString): if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]: if Index > OptionStart: - OptionList.append(OptionString[OptionStart:Index-1]) + OptionList.append(OptionString[OptionStart:Index - 1]) OptionStart = Index LastChar = CurrentChar OptionList.append(OptionString[OptionStart:]) @@ -1594,6 +1890,7 @@ class PathClass(object): # Remove any '.' and '..' 
in path if self.Root: + self.Root = mws.getWs(self.Root, self.File) self.Path = os.path.normpath(os.path.join(self.Root, self.File)) self.Root = os.path.normpath(CommonPath([self.Root, self.Path])) # eliminate the side-effect of 'C:' @@ -1603,7 +1900,7 @@ class PathClass(object): if self.Root[-1] == os.path.sep: self.File = self.Path[len(self.Root):] else: - self.File = self.Path[len(self.Root)+1:] + self.File = self.Path[len(self.Root) + 1:] else: self.Path = os.path.normpath(self.File) @@ -1704,7 +2001,10 @@ class PathClass(object): RealFile = os.path.join(self.AlterRoot, self.File) elif self.Root: RealFile = os.path.join(self.Root, self.File) - return FILE_NOT_FOUND, os.path.join(self.AlterRoot, RealFile) + if len (mws.getPkgPath()) == 0: + return FILE_NOT_FOUND, os.path.join(self.AlterRoot, RealFile) + else: + return FILE_NOT_FOUND, "%s is not found in packages path:\n\t%s" % (self.File, '\n\t'.join(mws.getPkgPath())) ErrorCode = 0 ErrorInfo = '' @@ -1811,58 +2111,237 @@ class PeImageClass(): Value = (Value << 8) | int(ByteList[index]) return Value +class DefaultStore(): + def __init__(self,DefaultStores ): + self.DefaultStores = DefaultStores + def DefaultStoreID(self,DefaultStoreName): + for key,value in self.DefaultStores.items(): + if value == DefaultStoreName: + return key + return None + def GetDefaultDefault(self): + if not self.DefaultStores or "0" in self.DefaultStores: + return "0",TAB_DEFAULT_STORES_DEFAULT + else: + minvalue = min([int(value_str) for value_str in self.DefaultStores.keys()]) + return (str(minvalue), self.DefaultStores[str(minvalue)]) + def GetMin(self,DefaultSIdList): + if not DefaultSIdList: + return "STANDARD" + storeidset = {storeid for storeid, storename in self.DefaultStores.values() if storename in DefaultSIdList} + if not storeidset: + return "" + minid = min(storeidset ) + for sid,name in self.DefaultStores.values(): + if sid == minid: + return name class SkuClass(): DEFAULT = 0 SINGLE = 1 MULTIPLE =2 - def __init__(self,SkuIdentifier='', SkuIds={}): + def __init__(self,SkuIdentifier='', SkuIds=None): + if SkuIds is None: + SkuIds = {} + + for SkuName in SkuIds: + SkuId = SkuIds[SkuName][0] + skuid_num = int(SkuId,16) if SkuId.upper().startswith("0X") else int(SkuId) + if skuid_num > 0xFFFFFFFFFFFFFFFF: + EdkLogger.error("build", PARAMETER_INVALID, + ExtraData = "SKU-ID [%s] value %s exceeds the max value of UINT64" + % (SkuName, SkuId)) self.AvailableSkuIds = sdict() self.SkuIdSet = [] - + self.SkuIdNumberSet = [] + self.SkuData = SkuIds + self.__SkuInherit = {} + self.__SkuIdentifier = SkuIdentifier if SkuIdentifier == '' or SkuIdentifier is None: self.SkuIdSet = ['DEFAULT'] + self.SkuIdNumberSet = ['0U'] elif SkuIdentifier == 'ALL': self.SkuIdSet = SkuIds.keys() + self.SkuIdNumberSet = [num[0].strip() + 'U' for num in SkuIds.values()] else: r = SkuIdentifier.split('|') - self.SkuIdSet=[r[k].strip() for k in range(len(r))] - if len(self.SkuIdSet) == 2 and 'DEFAULT' in self.SkuIdSet and SkuIdentifier != 'ALL': - self.SkuIdSet.remove('DEFAULT') - + self.SkuIdSet=[(r[k].strip()).upper() for k in range(len(r))] + k = None + try: + self.SkuIdNumberSet = [SkuIds[k][0].strip() + 'U' for k in self.SkuIdSet] + except Exception: + EdkLogger.error("build", PARAMETER_INVALID, + ExtraData = "SKU-ID [%s] is not supported by the platform. 
[Valid SKU-ID: %s]" + % (k, " | ".join(SkuIds.keys()))) for each in self.SkuIdSet: if each in SkuIds: - self.AvailableSkuIds[each] = SkuIds[each] + self.AvailableSkuIds[each] = SkuIds[each][0] else: EdkLogger.error("build", PARAMETER_INVALID, ExtraData="SKU-ID [%s] is not supported by the platform. [Valid SKU-ID: %s]" - % (each, " ".join(SkuIds.keys()))) + % (each, " | ".join(SkuIds.keys()))) + if self.SkuUsageType != self.SINGLE: + self.AvailableSkuIds.update({'DEFAULT':0, 'COMMON':0}) + if self.SkuIdSet: + GlobalData.gSkuids = (self.SkuIdSet) + if 'COMMON' in GlobalData.gSkuids: + GlobalData.gSkuids.remove('COMMON') + if GlobalData.gSkuids: + GlobalData.gSkuids.sort() + + def GetNextSkuId(self, skuname): + if not self.__SkuInherit: + self.__SkuInherit = {} + for item in self.SkuData.values(): + self.__SkuInherit[item[1]]=item[2] if item[2] else "DEFAULT" + return self.__SkuInherit.get(skuname,"DEFAULT") + + def GetSkuChain(self,sku): + skulist = [sku] + nextsku = sku + while 1: + nextsku = self.GetNextSkuId(nextsku) + skulist.append(nextsku) + if nextsku == "DEFAULT": + break + skulist.reverse() + return skulist + def SkuOverrideOrder(self): + skuorderset = [] + for skuname in self.SkuIdSet: + skuorderset.append(self.GetSkuChain(skuname)) + skuorder = [] + for index in range(max([len(item) for item in skuorderset])): + for subset in skuorderset: + if index > len(subset)-1: + continue + if subset[index] in skuorder: + continue + skuorder.append(subset[index]) + + return skuorder + def __SkuUsageType(self): + if self.__SkuIdentifier.upper() == "ALL": + return SkuClass.MULTIPLE + if len(self.SkuIdSet) == 1: if self.SkuIdSet[0] == 'DEFAULT': return SkuClass.DEFAULT else: return SkuClass.SINGLE + elif len(self.SkuIdSet) == 2: + if 'DEFAULT' in self.SkuIdSet: + return SkuClass.SINGLE + else: + return SkuClass.MULTIPLE else: return SkuClass.MULTIPLE + def DumpSkuIdArrary(self): + ArrayStrList = [] + if self.SkuUsageType == SkuClass.SINGLE: + ArrayStr = "{0x0}" + else: + for skuname in self.AvailableSkuIds: + if skuname == "COMMON": + continue + while skuname != "DEFAULT": + ArrayStrList.append(hex(int(self.AvailableSkuIds[skuname]))) + skuname = self.GetNextSkuId(skuname) + ArrayStrList.append("0x0") + ArrayStr = "{" + ",".join(ArrayStrList) + "}" + return ArrayStr def __GetAvailableSkuIds(self): return self.AvailableSkuIds def __GetSystemSkuID(self): if self.__SkuUsageType() == SkuClass.SINGLE: - return self.SkuIdSet[0] + if len(self.SkuIdSet) == 1: + return self.SkuIdSet[0] + else: + return self.SkuIdSet[0] if self.SkuIdSet[0] != 'DEFAULT' else self.SkuIdSet[1] else: return 'DEFAULT' - + def __GetAvailableSkuIdNumber(self): + return self.SkuIdNumberSet SystemSkuId = property(__GetSystemSkuID) AvailableSkuIdSet = property(__GetAvailableSkuIds) SkuUsageType = property(__SkuUsageType) + AvailableSkuIdNumSet = property(__GetAvailableSkuIdNumber) + +# +# Pack a registry format GUID +# +def PackRegistryFormatGuid(Guid): + Guid = Guid.split('-') + return pack('=LHHBBBBBBBB', + int(Guid[0], 16), + int(Guid[1], 16), + int(Guid[2], 16), + int(Guid[3][-4:-2], 16), + int(Guid[3][-2:], 16), + int(Guid[4][-12:-10], 16), + int(Guid[4][-10:-8], 16), + int(Guid[4][-8:-6], 16), + int(Guid[4][-6:-4], 16), + int(Guid[4][-4:-2], 16), + int(Guid[4][-2:], 16) + ) + +def BuildOptionPcdValueFormat(TokenSpaceGuidCName, TokenCName, PcdDatumType, Value): + if PcdDatumType == 'VOID*': + if Value.startswith('L'): + if not Value[1]: + EdkLogger.error("build", FORMAT_INVALID, 'For Void* type PCD, when specify the Value in 
the command line, please use the following format: "string", L"string", H"{...}"') + Value = Value[0] + '"' + Value[1:] + '"' + elif Value.startswith('H'): + if not Value[1]: + EdkLogger.error("build", FORMAT_INVALID, 'For Void* type PCD, when specify the Value in the command line, please use the following format: "string", L"string", H"{...}"') + Value = Value[1:] + else: + if not Value[0]: + EdkLogger.error("build", FORMAT_INVALID, 'For Void* type PCD, when specify the Value in the command line, please use the following format: "string", L"string", H"{...}"') + Value = '"' + Value + '"' + + IsValid, Cause = CheckPcdDatum(PcdDatumType, Value) + if not IsValid: + EdkLogger.error("build", FORMAT_INVALID, Cause, ExtraData="%s.%s" % (TokenSpaceGuidCName, TokenCName)) + if PcdDatumType == 'BOOLEAN': + Value = Value.upper() + if Value == 'TRUE' or Value == '1': + Value = '1' + elif Value == 'FALSE' or Value == '0': + Value = '0' + return Value +## Get the integer value from string like "14U" or integer like 2 +# +# @param Input The object that may be either a integer value or a string +# +# @retval Value The integer value that the input represents +# +def GetIntegerValue(Input): + if type(Input) in (int, long): + return Input + String = Input + if String.endswith("U"): + String = String[:-1] + if String.endswith("ULL"): + String = String[:-3] + if String.endswith("LL"): + String = String[:-2] + + if String.startswith("0x") or String.startswith("0X"): + return int(String, 16) + elif String == '': + return 0 + else: + return int(String) ## #
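Below is a minimal usage sketch (not part of the patch itself) for a few of the helpers this change introduces: GetIntegerValue, ParseFieldValue and PackRegistryFormatGuid. It assumes a Python 2.7 interpreter with the BaseTools/Source/Python directory on sys.path so that the module imports as Common.Misc; the path literal and the sample GUID are illustrative only.

# usage_sketch.py -- illustrative only; assumes BaseTools/Source/Python is importable
import sys
sys.path.append('BaseTools/Source/Python')          # hypothetical, adjust to the local checkout

from Common.Misc import GetIntegerValue, ParseFieldValue, PackRegistryFormatGuid

# GetIntegerValue() strips C-style suffixes (U, LL, ULL) before converting.
print GetIntegerValue('14U')                         # 14
print GetIntegerValue('0x10ULL')                     # 16

# ParseFieldValue() returns a (value, size-in-bytes) tuple for DSC/FDF field syntax.
print ParseFieldValue('UINT16(0x1234)')              # (4660, 2)
print ParseFieldValue('"AB"')                        # ASCII string packed little-endian; size counts the trailing NUL
print ParseFieldValue('TRUE')                        # (1, 1)

# PackRegistryFormatGuid() packs a registry-format GUID string into its 16-byte
# little-endian binary layout (struct format '=LHHBBBBBBBB').
print len(PackRegistryFormatGuid('12345678-1234-1234-1234-123456789abc'))   # 16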