## @file\r
# Common routines used by all tools\r
#\r
-# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>\r
+# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>\r
# This program and the accompanying materials\r
# are licensed and made available under the terms and conditions of the BSD License\r
# which accompanies this distribution. The full text of the license may be found at\r
import cPickle\r
import array\r
import shutil\r
+from struct import pack\r
from UserDict import IterableUserDict\r
from UserList import UserList\r
\r
from CommonDataClass.DataClass import *\r
from Parsing import GetSplitValueList\r
from Common.LongFilePathSupport import OpenLongFilePath as open\r
+from Common.MultipleWorkspace import MultipleWorkspace as mws\r
\r
## Regular expression used to find out place holders in string template\r
-gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE|re.UNICODE)\r
+gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE | re.UNICODE)\r
\r
## Dictionary used to store file time stamp for quick re-access\r
gFileTimeStampCache = {} # {file path : file time stamp}\r
## Dictionary used to store dependencies of files\r
gDependencyDatabase = {} # arch : {file path : [dependent files list]}\r
\r
def GetVariableOffset(mapfilepath, efifilepath, varnames):
    """ Parse map file to get variable offset in current EFI file
    @param mapfilepath   Map file absolute path
    @param efifilepath:  EFI binary file full path
    @param varnames      iterable container whose elements are variable names to be searched

    @return List whose elements are tuples of (variable name, raw offset),
            or None when the map file cannot be read or is empty
    """
    try:
        f = open(mapfilepath, 'r')
        try:
            lines = f.readlines()
        finally:
            # guarantee the handle is released even if readlines() fails
            f.close()
    except Exception:
        # best effort: any failure to read the map file is reported as None
        return None

    if not lines:
        return None
    # GCC linker map files start with an archive-member banner; anything
    # else (e.g. MSVC link.exe output) goes to the general parser.
    firstline = lines[0].strip()
    if (firstline.startswith("Archive member included ") and
        firstline.endswith(" file (symbol)")):
        return _parseForGCC(lines, efifilepath, varnames)
    return _parseGeneral(lines, efifilepath, varnames)
+\r
+def _parseForGCC(lines, efifilepath, varnames):\r
+ """ Parse map file generated by GCC linker """\r
+ status = 0\r
+ sections = []\r
+ varoffset = []\r
+ for index, line in enumerate(lines):\r
+ line = line.strip()\r
+ # status machine transection\r
+ if status == 0 and line == "Memory Configuration":\r
+ status = 1\r
+ continue\r
+ elif status == 1 and line == 'Linker script and memory map':\r
+ status = 2\r
+ continue\r
+ elif status ==2 and line == 'START GROUP':\r
+ status = 3\r
+ continue\r
+\r
+ # status handler\r
+ if status == 3:\r
+ m = re.match('^([\w_\.]+) +([\da-fA-Fx]+) +([\da-fA-Fx]+)$', line)\r
+ if m != None:\r
+ sections.append(m.groups(0))\r
+ for varname in varnames:\r
+ Str = ''\r
+ m = re.match("^.data.(%s)" % varname, line)\r
+ if m != None:\r
+ m = re.match(".data.(%s)$" % varname, line)\r
+ if m != None:\r
+ Str = lines[index + 1]\r
+ else:\r
+ Str = line[len(".data.%s" % varname):]\r
+ if Str:\r
+ m = re.match('^([\da-fA-Fx]+) +([\da-fA-Fx]+)', Str.strip())\r
+ if m != None:\r
+ varoffset.append((varname, int(m.groups(0)[0], 16) , int(sections[-1][1], 16), sections[-1][0]))\r
+\r
+ if not varoffset:\r
+ return []\r
+ # get section information from efi file\r
+ efisecs = PeImageClass(efifilepath).SectionHeaderList\r
+ if efisecs == None or len(efisecs) == 0:\r
+ return []\r
+ #redirection\r
+ redirection = 0\r
+ for efisec in efisecs:\r
+ for section in sections:\r
+ if section[0].strip() == efisec[0].strip() and section[0].strip() == '.text':\r
+ redirection = int(section[1], 16) - efisec[1]\r
+\r
+ ret = []\r
+ for var in varoffset:\r
+ for efisec in efisecs:\r
+ if var[1] >= efisec[1] and var[1] < efisec[1]+efisec[3]:\r
+ ret.append((var[0], hex(efisec[2] + var[1] - efisec[1] - redirection)))\r
+ return ret\r
+\r
+def _parseGeneral(lines, efifilepath, varnames):\r
+ status = 0 #0 - beginning of file; 1 - PE section definition; 2 - symbol table\r
+ secs = [] # key = section name\r
+ varoffset = []\r
+ secRe = re.compile('^([\da-fA-F]+):([\da-fA-F]+) +([\da-fA-F]+)[Hh]? +([.\w\$]+) +(\w+)', re.UNICODE)\r
+ symRe = re.compile('^([\da-fA-F]+):([\da-fA-F]+) +([\.:\\\\\w\?@\$]+) +([\da-fA-F]+)', re.UNICODE)\r
+\r
+ for line in lines:\r
+ line = line.strip()\r
+ if re.match("^Start[' ']+Length[' ']+Name[' ']+Class", line):\r
+ status = 1\r
+ continue\r
+ if re.match("^Address[' ']+Publics by Value[' ']+Rva\+Base", line):\r
+ status = 2\r
+ continue\r
+ if re.match("^entry point at", line):\r
+ status = 3\r
+ continue \r
+ if status == 1 and len(line) != 0:\r
+ m = secRe.match(line)\r
+ assert m != None, "Fail to parse the section in map file , line is %s" % line\r
+ sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)\r
+ secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])\r
+ if status == 2 and len(line) != 0:\r
+ for varname in varnames:\r
+ m = symRe.match(line)\r
+ assert m != None, "Fail to parse the symbol in map file, line is %s" % line\r
+ sec_no, sym_offset, sym_name, vir_addr = m.groups(0)\r
+ sec_no = int(sec_no, 16)\r
+ sym_offset = int(sym_offset, 16)\r
+ vir_addr = int(vir_addr, 16)\r
+ m2 = re.match('^[_]*(%s)' % varname, sym_name)\r
+ if m2 != None:\r
+ # fond a binary pcd entry in map file\r
+ for sec in secs:\r
+ if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]):\r
+ varoffset.append([varname, sec[3], sym_offset, vir_addr, sec_no])\r
+\r
+ if not varoffset: return []\r
+\r
+ # get section information from efi file\r
+ efisecs = PeImageClass(efifilepath).SectionHeaderList\r
+ if efisecs == None or len(efisecs) == 0:\r
+ return []\r
+\r
+ ret = []\r
+ for var in varoffset:\r
+ index = 0\r
+ for efisec in efisecs:\r
+ index = index + 1\r
+ if var[1].strip() == efisec[0].strip():\r
+ ret.append((var[0], hex(efisec[2] + var[2])))\r
+ elif var[4] == index:\r
+ ret.append((var[0], hex(efisec[2] + var[2])))\r
+\r
+ return ret\r
+\r
## Routine to process duplicated INF\r
#\r
# This function is called by following two cases:\r
def GuidStringToGuidStructureString(Guid):
    """Convert a registry format GUID string (8-4-4-4-12 hex fields) into
    the C structure initializer form used by EDK2, e.g.
    '{0xaabbccdd, 0xeeff, 0x0011, {0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99}}'
    """
    Fields = Guid.split('-')
    # the last eight bytes come from the 4th field (two bytes) and the
    # 5th field (six bytes), each rendered as a two-hex-digit literal
    ByteStrings = [Fields[3][0:2], Fields[3][2:4]]
    ByteStrings += [Fields[4][Pos:Pos + 2] for Pos in range(0, 12, 2)]
    Head = ', '.join('0x' + Part for Part in Fields[:3])
    Tail = ', '.join('0x' + Byte for Byte in ByteStrings)
    return '{' + Head + ', {' + Tail + '}}'
\r
Fd.write(Content)\r
Fd.close()\r
except IOError, X:\r
- EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s'%X)\r
+ EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)\r
\r
return True\r
\r
#\r
# @retval A list of all files\r
#\r
-def GetFiles(Root, SkipList=None, FullPath = True):\r
+def GetFiles(Root, SkipList=None, FullPath=True):\r
OriPath = Root\r
FileList = []\r
for Root, Dirs, Files in os.walk(Root):\r
if OverrideDir[-1] == os.path.sep:\r
return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)]\r
else:\r
- return NewFile[len(OverrideDir)+1:], NewFile[0:len(OverrideDir)]\r
+ return NewFile[len(OverrideDir) + 1:], NewFile[0:len(OverrideDir)]\r
if GlobalData.gAllFiles:\r
NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]\r
if not NewFile:\r
if Dir[-1] == os.path.sep:\r
return NewFile[len(Dir):], NewFile[0:len(Dir)]\r
else:\r
- return NewFile[len(Dir)+1:], NewFile[0:len(Dir)]\r
+ return NewFile[len(Dir) + 1:], NewFile[0:len(Dir)]\r
else:\r
return NewFile, ''\r
\r
# Replace the default dir to current dir\r
if Dir == '.':\r
Dir = os.getcwd()\r
- Dir = Dir[len(Workspace)+1:]\r
+ Dir = Dir[len(Workspace) + 1:]\r
\r
# First check if File has Edk definition itself\r
if File.find('$(EFI_SOURCE)') > -1 or File.find('$(EDK_SOURCE)') > -1:\r
# Dir is current module dir related to workspace\r
if Dir == '.':\r
Dir = os.getcwd()\r
- Dir = Dir[len(Workspace)+1:]\r
+ Dir = Dir[len(Workspace) + 1:]\r
\r
NewFile = File\r
RelaPath = AllFiles[os.path.normpath(Dir)]\r
#\r
# @param CName The CName of the GUID\r
# @param PackageList List of packages looking-up in\r
+# @param Inffile The driver file\r
#\r
# @retval GuidValue if the CName is found in any given package\r
# @retval None if the CName is not found in all given packages\r
#\r
def GuidValue(CName, PackageList, Inffile=None):
    # Look up the GUID value of CName across the given packages; when the
    # requesting INF lives outside a package, that package's private GUIDs
    # are hidden from it.
    for P in PackageList:
        GuidKeys = P.Guids.keys()
        if Inffile and P._PrivateGuids:
            if not Inffile.startswith(P.MetaFile.Dir):
                # simple filtered list instead of dict.fromkeys(...).keys()
                GuidKeys = [x for x in P.Guids if x not in P._PrivateGuids]
        if CName in GuidKeys:
            return P.Guids[CName]
    return None
\r
#\r
# @param CName The CName of the GUID\r
# @param PackageList List of packages looking-up in\r
+# @param Inffile The driver file\r
#\r
# @retval GuidValue if the CName is found in any given package\r
# @retval None if the CName is not found in all given packages\r
#\r
def ProtocolValue(CName, PackageList, Inffile=None):
    # Look up the protocol GUID value of CName across the given packages;
    # when the requesting INF lives outside a package, that package's
    # private protocols are hidden from it.
    for P in PackageList:
        ProtocolKeys = P.Protocols.keys()
        if Inffile and P._PrivateProtocols:
            if not Inffile.startswith(P.MetaFile.Dir):
                # simple filtered list instead of dict.fromkeys(...).keys()
                ProtocolKeys = [x for x in P.Protocols if x not in P._PrivateProtocols]
        if CName in ProtocolKeys:
            return P.Protocols[CName]
    return None
\r
#\r
# @param CName The CName of the GUID\r
# @param PackageList List of packages looking-up in\r
+# @param Inffile The driver file\r
#\r
# @retval GuidValue if the CName is found in any given package\r
# @retval None if the CName is not found in all given packages\r
#\r
def PpiValue(CName, PackageList, Inffile=None):
    # Look up the PPI GUID value of CName across the given packages; when
    # the requesting INF lives outside a package, that package's private
    # PPIs are hidden from it.
    for P in PackageList:
        PpiKeys = P.Ppis.keys()
        if Inffile and P._PrivatePpis:
            if not Inffile.startswith(P.MetaFile.Dir):
                # simple filtered list instead of dict.fromkeys(...).keys()
                PpiKeys = [x for x in P.Ppis if x not in P._PrivatePpis]
        if CName in PpiKeys:
            return P.Ppis[CName]
    return None
\r
#\r
# PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint\r
#\r
- for PlaceHolder,Start,End in PlaceHolderList:\r
+ for PlaceHolder, Start, End in PlaceHolderList:\r
self._SubSectionList.append(TemplateSection[SubSectionStart:Start])\r
self._SubSectionList.append(TemplateSection[Start:End])\r
self._PlaceHolderList.append(PlaceHolder)\r
if len(key) > 1:\r
RestKeys = key[1:]\r
elif self._Level_ > 1:\r
- RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]\r
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]\r
else:\r
FirstKey = key\r
if self._Level_ > 1:\r
- RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]\r
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]\r
\r
if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList:\r
FirstKey = self._Wildcard\r
if len(key) > 1:\r
RestKeys = key[1:]\r
else:\r
- RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]\r
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]\r
else:\r
FirstKey = key\r
if self._Level_ > 1:\r
- RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]\r
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]\r
\r
if FirstKey in self._ValidWildcardList:\r
FirstKey = self._Wildcard\r
Opr.close()\r
Opw.close()\r
\r
-## AnalyzeDscPcd\r
-#\r
-# Analyze DSC PCD value, since there is no data type info in DSC\r
-# This fuction is used to match functions (AnalyzePcdData, AnalyzeHiiPcdData, AnalyzeVpdPcdData) used for retrieving PCD value from database\r
-# 1. Feature flag: TokenSpace.PcdCName|PcdValue\r
-# 2. Fix and Patch:TokenSpace.PcdCName|PcdValue[|MaxSize]\r
-# 3. Dynamic default:\r
-# TokenSpace.PcdCName|PcdValue[|VOID*[|MaxSize]]\r
-# TokenSpace.PcdCName|PcdValue\r
-# 4. Dynamic VPD:\r
-# TokenSpace.PcdCName|VpdOffset[|VpdValue]\r
-# TokenSpace.PcdCName|VpdOffset[|MaxSize[|VpdValue]]\r
-# 5. Dynamic HII:\r
-# TokenSpace.PcdCName|HiiString|VaiableGuid|VariableOffset[|HiiValue]\r
-# PCD value needs to be located in such kind of string, and the PCD value might be an expression in which\r
-# there might have "|" operator, also in string value.\r
-#\r
-# @param Setting: String contain information described above with "TokenSpace.PcdCName|" stripped\r
-# @param PcdType: PCD type: feature, fixed, dynamic default VPD HII\r
-# @param DataType: The datum type of PCD: VOID*, UNIT, BOOL\r
-# @retval:\r
-# ValueList: A List contain fields described above\r
-# IsValid: True if conforming EBNF, otherwise False\r
-# Index: The index where PcdValue is in ValueList\r
-#\r
-def AnalyzeDscPcd(Setting, PcdType, DataType=''):\r
+def AnalyzePcdExpression(Setting):\r
Setting = Setting.strip()\r
# There might be escaped quote in a string: \", \\\"\r
Data = Setting.replace('\\\\', '//').replace('\\\"', '\\\'')\r
Pair += 1\r
elif ch == ')' and not InStr:\r
Pair -= 1\r
- \r
+\r
if (Pair > 0 or InStr) and ch == TAB_VALUE_SPLIT:\r
NewStr += '-'\r
else:\r
FieldList.append(Setting[StartPos:Pos].strip())\r
StartPos = Pos + 1\r
\r
+ return FieldList\r
+\r
+## AnalyzeDscPcd\r
+#\r
+# Analyze DSC PCD value, since there is no data type info in DSC\r
+# This function is used to match functions (AnalyzePcdData, AnalyzeHiiPcdData, AnalyzeVpdPcdData) used for retrieving PCD value from database\r
+# 1. Feature flag: TokenSpace.PcdCName|PcdValue\r
+# 2. Fix and Patch:TokenSpace.PcdCName|PcdValue[|MaxSize]\r
+# 3. Dynamic default:\r
+# TokenSpace.PcdCName|PcdValue[|VOID*[|MaxSize]]\r
+# TokenSpace.PcdCName|PcdValue\r
+# 4. Dynamic VPD:\r
+# TokenSpace.PcdCName|VpdOffset[|VpdValue]\r
+# TokenSpace.PcdCName|VpdOffset[|MaxSize[|VpdValue]]\r
+# 5. Dynamic HII:\r
+# TokenSpace.PcdCName|HiiString|VariableGuid|VariableOffset[|HiiValue]\r
+# PCD value needs to be located in such kind of string, and the PCD value might be an expression in which\r
+# there might have "|" operator, also in string value.\r
+#\r
+# @param Setting: String contain information described above with "TokenSpace.PcdCName|" stripped\r
+# @param PcdType: PCD type: feature, fixed, dynamic default VPD HII\r
+# @param DataType: The datum type of PCD: VOID*, UNIT, BOOL\r
+# @retval:\r
+# ValueList: A List contain fields described above\r
+# IsValid: True if conforming EBNF, otherwise False\r
+# Index: The index where PcdValue is in ValueList\r
+#\r
+def AnalyzeDscPcd(Setting, PcdType, DataType=''):\r
+ FieldList = AnalyzePcdExpression(Setting)\r
+\r
IsValid = True\r
if PcdType in (MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_FEATURE_FLAG):\r
Value = FieldList[0]\r
IsValid = (len(FieldList) <= 3)\r
else:\r
IsValid = (len(FieldList) <= 1)\r
- return [Value, Type, Size], IsValid, 0 \r
+ return [Value, Type, Size], IsValid, 0\r
elif PcdType in (MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_VPD):\r
VpdOffset = FieldList[0]\r
Value = Size = ''\r
return [VpdOffset, Size, Value], IsValid, 2\r
elif PcdType in (MODEL_PCD_DYNAMIC_HII, MODEL_PCD_DYNAMIC_EX_HII):\r
HiiString = FieldList[0]\r
- Guid = Offset = Value = ''\r
+ Guid = Offset = Value = Attribute = ''\r
if len(FieldList) > 1:\r
Guid = FieldList[1]\r
if len(FieldList) > 2:\r
Offset = FieldList[2]\r
if len(FieldList) > 3:\r
Value = FieldList[3]\r
- IsValid = (3 <= len(FieldList) <= 4)\r
- return [HiiString, Guid, Offset, Value], IsValid, 3\r
+ if len(FieldList) > 4:\r
+ Attribute = FieldList[4]\r
+ IsValid = (3 <= len(FieldList) <= 5)\r
+ return [HiiString, Guid, Offset, Value, Attribute], IsValid, 3\r
return [], False, 0\r
\r
## AnalyzePcdData\r
# \r
# @retval ValueList: A List contain value, datum type and toke number. \r
#\r
-def AnalyzePcdData(Setting): \r
- ValueList = ['', '', ''] \r
- \r
- ValueRe = re.compile(r'^\s*L?\".*\|.*\"')\r
+def AnalyzePcdData(Setting):\r
+ ValueList = ['', '', '']\r
+\r
+ ValueRe = re.compile(r'^\s*L?\".*\|.*\"')\r
PtrValue = ValueRe.findall(Setting)\r
\r
ValueUpdateFlag = False\r
\r
if len(PtrValue) >= 1:\r
Setting = re.sub(ValueRe, '', Setting)\r
- ValueUpdateFlag = True \r
+ ValueUpdateFlag = True\r
\r
TokenList = Setting.split(TAB_VALUE_SPLIT)\r
ValueList[0:len(TokenList)] = TokenList\r
# \r
# @retval ValueList: A List contain VpdOffset, MaxDatumSize and InitialValue. \r
#\r
-def AnalyzeVpdPcdData(Setting): \r
- ValueList = ['', '', ''] \r
- \r
- ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')\r
+def AnalyzeVpdPcdData(Setting):\r
+ ValueList = ['', '', '']\r
+\r
+ ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')\r
PtrValue = ValueRe.findall(Setting)\r
\r
ValueUpdateFlag = False\r
\r
if len(PtrValue) >= 1:\r
Setting = re.sub(ValueRe, '', Setting)\r
- ValueUpdateFlag = True \r
+ ValueUpdateFlag = True\r
\r
TokenList = Setting.split(TAB_VALUE_SPLIT)\r
ValueList[0:len(TokenList)] = TokenList\r
#\r
def CheckPcdDatum(Type, Value):\r
if Type == "VOID*":\r
- ValueRe = re.compile(r'\s*L?\".*\"\s*$')\r
+ ValueRe = re.compile(r'\s*L?\".*\"\s*$')\r
if not (((Value.startswith('L"') or Value.startswith('"')) and Value.endswith('"'))\r
or (Value.startswith('{') and Value.endswith('}'))\r
):\r
return False, "Invalid value [%s] of type [%s]; must be in the form of {...} for array"\\r
- ", or \"...\" for string, or L\"...\" for unicode string" % (Value, Type) \r
+ ", or \"...\" for string, or L\"...\" for unicode string" % (Value, Type)\r
elif ValueRe.match(Value):\r
# Check the chars in UnicodeString or CString is printable\r
if Value.startswith("L"):\r
\r
if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]:\r
if Index > OptionStart:\r
- OptionList.append(OptionString[OptionStart:Index-1])\r
+ OptionList.append(OptionString[OptionStart:Index - 1])\r
OptionStart = Index\r
LastChar = CurrentChar\r
OptionList.append(OptionString[OptionStart:])\r
\r
# Remove any '.' and '..' in path\r
if self.Root:\r
+ self.Root = mws.getWs(self.Root, self.File)\r
self.Path = os.path.normpath(os.path.join(self.Root, self.File))\r
self.Root = os.path.normpath(CommonPath([self.Root, self.Path]))\r
# eliminate the side-effect of 'C:'\r
if self.Root[-1] == os.path.sep:\r
self.File = self.Path[len(self.Root):]\r
else:\r
- self.File = self.Path[len(self.Root)+1:]\r
+ self.File = self.Path[len(self.Root) + 1:]\r
else:\r
self.Path = os.path.normpath(self.File)\r
\r
RealFile = os.path.join(self.AlterRoot, self.File)\r
elif self.Root:\r
RealFile = os.path.join(self.Root, self.File)\r
- return FILE_NOT_FOUND, os.path.join(self.AlterRoot, RealFile)\r
+ if len (mws.getPkgPath()) == 0:\r
+ return FILE_NOT_FOUND, os.path.join(self.AlterRoot, RealFile)\r
+ else:\r
+ return FILE_NOT_FOUND, "%s is not found in packages path:\n\t%s" % (self.File, '\n\t'.join(mws.getPkgPath()))\r
\r
ErrorCode = 0\r
ErrorInfo = ''\r
\r
self.AvailableSkuIds = sdict()\r
self.SkuIdSet = []\r
- \r
+ self.SkuIdNumberSet = []\r
if SkuIdentifier == '' or SkuIdentifier is None:\r
self.SkuIdSet = ['DEFAULT']\r
+ self.SkuIdNumberSet = ['0U']\r
elif SkuIdentifier == 'ALL':\r
self.SkuIdSet = SkuIds.keys()\r
+ self.SkuIdNumberSet = [num.strip() + 'U' for num in SkuIds.values()]\r
else:\r
r = SkuIdentifier.split('|') \r
self.SkuIdSet=[r[k].strip() for k in range(len(r))] \r
+ k = None\r
+ try: \r
+ self.SkuIdNumberSet = [SkuIds[k].strip() + 'U' for k in self.SkuIdSet] \r
+ except Exception:\r
+ EdkLogger.error("build", PARAMETER_INVALID,\r
+ ExtraData = "SKU-ID [%s] is not supported by the platform. [Valid SKU-ID: %s]"\r
+ % (k, " ".join(SkuIds.keys())))\r
if len(self.SkuIdSet) == 2 and 'DEFAULT' in self.SkuIdSet and SkuIdentifier != 'ALL':\r
self.SkuIdSet.remove('DEFAULT')\r
- \r
+ self.SkuIdNumberSet.remove('0U')\r
for each in self.SkuIdSet:\r
if each in SkuIds:\r
self.AvailableSkuIds[each] = SkuIds[each]\r
return self.SkuIdSet[0]\r
else:\r
return 'DEFAULT'\r
- \r
    def __GetAvailableSkuIdNumber(self):
        # Accessor backing the AvailableSkuIdNumSet property: returns the
        # SKU id numbers selected for this build (strings with a 'U'
        # suffix, as populated in __init__).
        return self.SkuIdNumberSet
SystemSkuId = property(__GetSystemSkuID)\r
AvailableSkuIdSet = property(__GetAvailableSkuIds)\r
SkuUsageType = property(__SkuUsageType)\r
+ AvailableSkuIdNumSet = property(__GetAvailableSkuIdNumber)\r
+\r
+#\r
+# Pack a registry format GUID\r
+#\r
def PackRegistryFormatGuid(Guid):
    """Pack a registry format GUID string (8-4-4-4-12 hex fields) into its
    16-byte binary representation: a little-endian ULONG and two USHORTs
    followed by eight raw bytes.
    """
    fields = Guid.split('-')
    clock_seq = fields[3]
    node = fields[4]
    # the trailing eight bytes, taken two hex digits at a time
    tail_hex = [clock_seq[-4:-2], clock_seq[-2:],
                node[-12:-10], node[-10:-8], node[-8:-6],
                node[-6:-4], node[-4:-2], node[-2:]]
    values = [int(fields[0], 16), int(fields[1], 16), int(fields[2], 16)]
    values.extend(int(h, 16) for h in tail_hex)
    return pack('=LHHBBBBBBBB', *values)
\r
##\r
#\r