X-Git-Url: https://git.proxmox.com/?a=blobdiff_plain;f=BaseTools%2FSource%2FPython%2FCommon%2FMisc.py;h=1a5968a221654c725dfc8625931d429d471b98c0;hb=d03c056b2946cc2f83b6d206297915dadc08f230;hp=19a1319639a54326364ea47f29888eaa908e4436;hpb=9b9500a1c9e620c9aac565679e2eb77255ba3306;p=mirror_edk2.git diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py index 19a1319639..1a5968a221 100644 --- a/BaseTools/Source/Python/Common/Misc.py +++ b/BaseTools/Source/Python/Common/Misc.py @@ -1,7 +1,7 @@ ## @file # Common routines used by all tools # -# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.
+# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.
# This program and the accompanying materials # are licensed and made available under the terms and conditions of the BSD License # which accompanies this distribution. The full text of the license may be found at @@ -24,6 +24,7 @@ import re import cPickle import array import shutil +from struct import pack from UserDict import IterableUserDict from UserList import UserList @@ -34,9 +35,10 @@ from BuildToolError import * from CommonDataClass.DataClass import * from Parsing import GetSplitValueList from Common.LongFilePathSupport import OpenLongFilePath as open +from Common.MultipleWorkspace import MultipleWorkspace as mws ## Regular expression used to find out place holders in string template -gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE|re.UNICODE) +gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE | re.UNICODE) ## Dictionary used to store file time stamp for quick re-access gFileTimeStampCache = {} # {file path : file time stamp} @@ -44,6 +46,143 @@ gFileTimeStampCache = {} # {file path : file time stamp} ## Dictionary used to store dependencies of files gDependencyDatabase = {} # arch : {file path : [dependent files list]} +def GetVariableOffset(mapfilepath, efifilepath, varnames): + """ Parse map file to get variable offset in current EFI file + @param mapfilepath Map file absolution path + @param efifilepath: EFI binary file full path + @param varnames iteratable container whose elements are variable names to be searched + + @return List whos elements are tuple with variable name and raw offset + """ + lines = [] + try: + f = open(mapfilepath, 'r') + lines = f.readlines() + f.close() + except: + return None + + if len(lines) == 0: return None + firstline = lines[0].strip() + if (firstline.startswith("Archive member included ") and + firstline.endswith(" file (symbol)")): + return _parseForGCC(lines, efifilepath, varnames) + return _parseGeneral(lines, efifilepath, varnames) + +def _parseForGCC(lines, efifilepath, varnames): + """ Parse map file generated by GCC linker """ + status = 0 + sections = [] + varoffset = [] + for index, line in enumerate(lines): + line = line.strip() + # status machine transection + if status == 0 and line == "Memory Configuration": + status = 1 + continue + elif status == 1 and line == 'Linker script and memory map': + status = 2 + continue + elif status ==2 and line == 'START GROUP': + status = 3 + continue + + # status handler + if status == 3: + m = re.match('^([\w_\.]+) +([\da-fA-Fx]+) +([\da-fA-Fx]+)$', line) + if m != None: + sections.append(m.groups(0)) + for varname in varnames: + Str = '' + m = re.match("^.data.(%s)" % varname, line) + if m != None: + m = re.match(".data.(%s)$" % varname, line) + if m != None: + Str = lines[index + 1] + else: + Str = line[len(".data.%s" % varname):] + if Str: + m = re.match('^([\da-fA-Fx]+) +([\da-fA-Fx]+)', Str.strip()) + if m != None: + varoffset.append((varname, int(m.groups(0)[0], 16) , int(sections[-1][1], 16), sections[-1][0])) + + if not varoffset: + return [] + # get section information from efi file + efisecs = PeImageClass(efifilepath).SectionHeaderList + if efisecs == None or len(efisecs) == 0: + return [] + #redirection + redirection = 0 + for efisec in efisecs: + for section in sections: + if section[0].strip() == efisec[0].strip() and section[0].strip() == '.text': + redirection = int(section[1], 16) - efisec[1] + + ret = [] + for var in varoffset: + for efisec in efisecs: + if var[1] >= efisec[1] and var[1] < efisec[1]+efisec[3]: + ret.append((var[0], 
hex(efisec[2] + var[1] - efisec[1] - redirection))) + return ret + +def _parseGeneral(lines, efifilepath, varnames): + status = 0 #0 - beginning of file; 1 - PE section definition; 2 - symbol table + secs = [] # key = section name + varoffset = [] + secRe = re.compile('^([\da-fA-F]+):([\da-fA-F]+) +([\da-fA-F]+)[Hh]? +([.\w\$]+) +(\w+)', re.UNICODE) + symRe = re.compile('^([\da-fA-F]+):([\da-fA-F]+) +([\.:\\\\\w\?@\$]+) +([\da-fA-F]+)', re.UNICODE) + + for line in lines: + line = line.strip() + if re.match("^Start[' ']+Length[' ']+Name[' ']+Class", line): + status = 1 + continue + if re.match("^Address[' ']+Publics by Value[' ']+Rva\+Base", line): + status = 2 + continue + if re.match("^entry point at", line): + status = 3 + continue + if status == 1 and len(line) != 0: + m = secRe.match(line) + assert m != None, "Fail to parse the section in map file , line is %s" % line + sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0) + secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class]) + if status == 2 and len(line) != 0: + for varname in varnames: + m = symRe.match(line) + assert m != None, "Fail to parse the symbol in map file, line is %s" % line + sec_no, sym_offset, sym_name, vir_addr = m.groups(0) + sec_no = int(sec_no, 16) + sym_offset = int(sym_offset, 16) + vir_addr = int(vir_addr, 16) + m2 = re.match('^[_]*(%s)' % varname, sym_name) + if m2 != None: + # fond a binary pcd entry in map file + for sec in secs: + if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]): + varoffset.append([varname, sec[3], sym_offset, vir_addr, sec_no]) + + if not varoffset: return [] + + # get section information from efi file + efisecs = PeImageClass(efifilepath).SectionHeaderList + if efisecs == None or len(efisecs) == 0: + return [] + + ret = [] + for var in varoffset: + index = 0 + for efisec in efisecs: + index = index + 1 + if var[1].strip() == efisec[0].strip(): + ret.append((var[0], hex(efisec[2] + var[2]))) + elif var[4] == index: + ret.append((var[0], hex(efisec[2] + var[2]))) + + return ret + ## Routine to process duplicated INF # # This function is called by following two cases: @@ -163,11 +302,11 @@ def ProcessVariableArgument(Option, OptionString, Value, Parser): def GuidStringToGuidStructureString(Guid): GuidList = Guid.split('-') Result = '{' - for Index in range(0,3,1): + for Index in range(0, 3, 1): Result = Result + '0x' + GuidList[Index] + ', ' Result = Result + '{0x' + GuidList[3][0:2] + ', 0x' + GuidList[3][2:4] - for Index in range(0,12,2): - Result = Result + ', 0x' + GuidList[4][Index:Index+2] + for Index in range(0, 12, 2): + Result = Result + ', 0x' + GuidList[4][Index:Index + 2] Result += '}}' return Result @@ -364,7 +503,7 @@ def SaveFileOnChange(File, Content, IsBinaryFile=True): Fd.write(Content) Fd.close() except IOError, X: - EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s'%X) + EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X) return True @@ -483,7 +622,7 @@ class DirCache: # # @retval A list of all files # -def GetFiles(Root, SkipList=None, FullPath = True): +def GetFiles(Root, SkipList=None, FullPath=True): OriPath = Root FileList = [] for Root, Dirs, Files in os.walk(Root): @@ -533,7 +672,7 @@ def RealPath2(File, Dir='', OverrideDir=''): if OverrideDir[-1] == os.path.sep: return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)] else: - return NewFile[len(OverrideDir)+1:], NewFile[0:len(OverrideDir)] + return NewFile[len(OverrideDir) + 1:], 
NewFile[0:len(OverrideDir)] if GlobalData.gAllFiles: NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))] if not NewFile: @@ -545,7 +684,7 @@ def RealPath2(File, Dir='', OverrideDir=''): if Dir[-1] == os.path.sep: return NewFile[len(Dir):], NewFile[0:len(Dir)] else: - return NewFile[len(Dir)+1:], NewFile[0:len(Dir)] + return NewFile[len(Dir) + 1:], NewFile[0:len(Dir)] else: return NewFile, '' @@ -571,7 +710,7 @@ def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource=' # Replace the default dir to current dir if Dir == '.': Dir = os.getcwd() - Dir = Dir[len(Workspace)+1:] + Dir = Dir[len(Workspace) + 1:] # First check if File has Edk definition itself if File.find('$(EFI_SOURCE)') > -1 or File.find('$(EDK_SOURCE)') > -1: @@ -610,7 +749,7 @@ def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.' # Dir is current module dir related to workspace if Dir == '.': Dir = os.getcwd() - Dir = Dir[len(Workspace)+1:] + Dir = Dir[len(Workspace) + 1:] NewFile = File RelaPath = AllFiles[os.path.normpath(Dir)] @@ -664,13 +803,18 @@ def GetRelPath(Path1, Path2): # # @param CName The CName of the GUID # @param PackageList List of packages looking-up in +# @param Inffile The driver file # # @retval GuidValue if the CName is found in any given package # @retval None if the CName is not found in all given packages # -def GuidValue(CName, PackageList): +def GuidValue(CName, PackageList, Inffile = None): for P in PackageList: - if CName in P.Guids: + GuidKeys = P.Guids.keys() + if Inffile and P._PrivateGuids: + if not Inffile.startswith(P.MetaFile.Dir): + GuidKeys = (dict.fromkeys(x for x in P.Guids if x not in P._PrivateGuids)).keys() + if CName in GuidKeys: return P.Guids[CName] return None @@ -678,13 +822,18 @@ def GuidValue(CName, PackageList): # # @param CName The CName of the GUID # @param PackageList List of packages looking-up in +# @param Inffile The driver file # # @retval GuidValue if the CName is found in any given package # @retval None if the CName is not found in all given packages # -def ProtocolValue(CName, PackageList): +def ProtocolValue(CName, PackageList, Inffile = None): for P in PackageList: - if CName in P.Protocols: + ProtocolKeys = P.Protocols.keys() + if Inffile and P._PrivateProtocols: + if not Inffile.startswith(P.MetaFile.Dir): + ProtocolKeys = (dict.fromkeys(x for x in P.Protocols if x not in P._PrivateProtocols)).keys() + if CName in ProtocolKeys: return P.Protocols[CName] return None @@ -692,13 +841,18 @@ def ProtocolValue(CName, PackageList): # # @param CName The CName of the GUID # @param PackageList List of packages looking-up in +# @param Inffile The driver file # # @retval GuidValue if the CName is found in any given package # @retval None if the CName is not found in all given packages # -def PpiValue(CName, PackageList): +def PpiValue(CName, PackageList, Inffile = None): for P in PackageList: - if CName in P.Ppis: + PpiKeys = P.Ppis.keys() + if Inffile and P._PrivatePpis: + if not Inffile.startswith(P.MetaFile.Dir): + PpiKeys = (dict.fromkeys(x for x in P.Ppis if x not in P._PrivatePpis)).keys() + if CName in PpiKeys: return P.Ppis[CName] return None @@ -735,7 +889,7 @@ class TemplateString(object): # # PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint # - for PlaceHolder,Start,End in PlaceHolderList: + for PlaceHolder, Start, End in PlaceHolderList: self._SubSectionList.append(TemplateSection[SubSectionStart:Start]) self._SubSectionList.append(TemplateSection[Start:End]) 
self._PlaceHolderList.append(PlaceHolder) @@ -1121,11 +1275,11 @@ class tdict: if len(key) > 1: RestKeys = key[1:] elif self._Level_ > 1: - RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] + RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)] else: FirstKey = key if self._Level_ > 1: - RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] + RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)] if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList: FirstKey = self._Wildcard @@ -1198,11 +1352,11 @@ class tdict: if len(key) > 1: RestKeys = key[1:] else: - RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] + RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)] else: FirstKey = key if self._Level_ > 1: - RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] + RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)] if FirstKey in self._ValidWildcardList: FirstKey = self._Wildcard @@ -1267,32 +1421,7 @@ def ParseConsoleLog(Filename): Opr.close() Opw.close() -## AnalyzeDscPcd -# -# Analyze DSC PCD value, since there is no data type info in DSC -# This fuction is used to match functions (AnalyzePcdData, AnalyzeHiiPcdData, AnalyzeVpdPcdData) used for retrieving PCD value from database -# 1. Feature flag: TokenSpace.PcdCName|PcdValue -# 2. Fix and Patch:TokenSpace.PcdCName|PcdValue[|MaxSize] -# 3. Dynamic default: -# TokenSpace.PcdCName|PcdValue[|VOID*[|MaxSize]] -# TokenSpace.PcdCName|PcdValue -# 4. Dynamic VPD: -# TokenSpace.PcdCName|VpdOffset[|VpdValue] -# TokenSpace.PcdCName|VpdOffset[|MaxSize[|VpdValue]] -# 5. Dynamic HII: -# TokenSpace.PcdCName|HiiString|VaiableGuid|VariableOffset[|HiiValue] -# PCD value needs to be located in such kind of string, and the PCD value might be an expression in which -# there might have "|" operator, also in string value. -# -# @param Setting: String contain information described above with "TokenSpace.PcdCName|" stripped -# @param PcdType: PCD type: feature, fixed, dynamic default VPD HII -# @param DataType: The datum type of PCD: VOID*, UNIT, BOOL -# @retval: -# ValueList: A List contain fields described above -# IsValid: True if conforming EBNF, otherwise False -# Index: The index where PcdValue is in ValueList -# -def AnalyzeDscPcd(Setting, PcdType, DataType=''): +def AnalyzePcdExpression(Setting): Setting = Setting.strip() # There might be escaped quote in a string: \", \\\" Data = Setting.replace('\\\\', '//').replace('\\\"', '\\\'') @@ -1307,7 +1436,7 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''): Pair += 1 elif ch == ')' and not InStr: Pair -= 1 - + if (Pair > 0 or InStr) and ch == TAB_VALUE_SPLIT: NewStr += '-' else: @@ -1322,6 +1451,36 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''): FieldList.append(Setting[StartPos:Pos].strip()) StartPos = Pos + 1 + return FieldList + +## AnalyzeDscPcd +# +# Analyze DSC PCD value, since there is no data type info in DSC +# This fuction is used to match functions (AnalyzePcdData, AnalyzeHiiPcdData, AnalyzeVpdPcdData) used for retrieving PCD value from database +# 1. Feature flag: TokenSpace.PcdCName|PcdValue +# 2. Fix and Patch:TokenSpace.PcdCName|PcdValue[|MaxSize] +# 3. Dynamic default: +# TokenSpace.PcdCName|PcdValue[|VOID*[|MaxSize]] +# TokenSpace.PcdCName|PcdValue +# 4. Dynamic VPD: +# TokenSpace.PcdCName|VpdOffset[|VpdValue] +# TokenSpace.PcdCName|VpdOffset[|MaxSize[|VpdValue]] +# 5. 
Dynamic HII: +# TokenSpace.PcdCName|HiiString|VaiableGuid|VariableOffset[|HiiValue] +# PCD value needs to be located in such kind of string, and the PCD value might be an expression in which +# there might have "|" operator, also in string value. +# +# @param Setting: String contain information described above with "TokenSpace.PcdCName|" stripped +# @param PcdType: PCD type: feature, fixed, dynamic default VPD HII +# @param DataType: The datum type of PCD: VOID*, UNIT, BOOL +# @retval: +# ValueList: A List contain fields described above +# IsValid: True if conforming EBNF, otherwise False +# Index: The index where PcdValue is in ValueList +# +def AnalyzeDscPcd(Setting, PcdType, DataType=''): + FieldList = AnalyzePcdExpression(Setting) + IsValid = True if PcdType in (MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_FEATURE_FLAG): Value = FieldList[0] @@ -1361,7 +1520,7 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''): IsValid = (len(FieldList) <= 3) else: IsValid = (len(FieldList) <= 1) - return [Value, Type, Size], IsValid, 0 + return [Value, Type, Size], IsValid, 0 elif PcdType in (MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_VPD): VpdOffset = FieldList[0] Value = Size = '' @@ -1380,15 +1539,17 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''): return [VpdOffset, Size, Value], IsValid, 2 elif PcdType in (MODEL_PCD_DYNAMIC_HII, MODEL_PCD_DYNAMIC_EX_HII): HiiString = FieldList[0] - Guid = Offset = Value = '' + Guid = Offset = Value = Attribute = '' if len(FieldList) > 1: Guid = FieldList[1] if len(FieldList) > 2: Offset = FieldList[2] if len(FieldList) > 3: Value = FieldList[3] - IsValid = (3 <= len(FieldList) <= 4) - return [HiiString, Guid, Offset, Value], IsValid, 3 + if len(FieldList) > 4: + Attribute = FieldList[4] + IsValid = (3 <= len(FieldList) <= 5) + return [HiiString, Guid, Offset, Value, Attribute], IsValid, 3 return [], False, 0 ## AnalyzePcdData @@ -1400,17 +1561,17 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''): # # @retval ValueList: A List contain value, datum type and toke number. # -def AnalyzePcdData(Setting): - ValueList = ['', '', ''] - - ValueRe = re.compile(r'^\s*L?\".*\|.*\"') +def AnalyzePcdData(Setting): + ValueList = ['', '', ''] + + ValueRe = re.compile(r'^\s*L?\".*\|.*\"') PtrValue = ValueRe.findall(Setting) ValueUpdateFlag = False if len(PtrValue) >= 1: Setting = re.sub(ValueRe, '', Setting) - ValueUpdateFlag = True + ValueUpdateFlag = True TokenList = Setting.split(TAB_VALUE_SPLIT) ValueList[0:len(TokenList)] = TokenList @@ -1446,17 +1607,17 @@ def AnalyzeHiiPcdData(Setting): # # @retval ValueList: A List contain VpdOffset, MaxDatumSize and InitialValue. 
# -def AnalyzeVpdPcdData(Setting): - ValueList = ['', '', ''] - - ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$') +def AnalyzeVpdPcdData(Setting): + ValueList = ['', '', ''] + + ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$') PtrValue = ValueRe.findall(Setting) ValueUpdateFlag = False if len(PtrValue) >= 1: Setting = re.sub(ValueRe, '', Setting) - ValueUpdateFlag = True + ValueUpdateFlag = True TokenList = Setting.split(TAB_VALUE_SPLIT) ValueList[0:len(TokenList)] = TokenList @@ -1472,12 +1633,12 @@ def AnalyzeVpdPcdData(Setting): # def CheckPcdDatum(Type, Value): if Type == "VOID*": - ValueRe = re.compile(r'\s*L?\".*\"\s*$') + ValueRe = re.compile(r'\s*L?\".*\"\s*$') if not (((Value.startswith('L"') or Value.startswith('"')) and Value.endswith('"')) or (Value.startswith('{') and Value.endswith('}')) ): return False, "Invalid value [%s] of type [%s]; must be in the form of {...} for array"\ - ", or \"...\" for string, or L\"...\" for unicode string" % (Value, Type) + ", or \"...\" for string, or L\"...\" for unicode string" % (Value, Type) elif ValueRe.match(Value): # Check the chars in UnicodeString or CString is printable if Value.startswith("L"): @@ -1530,7 +1691,7 @@ def SplitOption(OptionString): if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]: if Index > OptionStart: - OptionList.append(OptionString[OptionStart:Index-1]) + OptionList.append(OptionString[OptionStart:Index - 1]) OptionStart = Index LastChar = CurrentChar OptionList.append(OptionString[OptionStart:]) @@ -1597,6 +1758,7 @@ class PathClass(object): # Remove any '.' and '..' in path if self.Root: + self.Root = mws.getWs(self.Root, self.File) self.Path = os.path.normpath(os.path.join(self.Root, self.File)) self.Root = os.path.normpath(CommonPath([self.Root, self.Path])) # eliminate the side-effect of 'C:' @@ -1606,7 +1768,7 @@ class PathClass(object): if self.Root[-1] == os.path.sep: self.File = self.Path[len(self.Root):] else: - self.File = self.Path[len(self.Root)+1:] + self.File = self.Path[len(self.Root) + 1:] else: self.Path = os.path.normpath(self.File) @@ -1707,7 +1869,10 @@ class PathClass(object): RealFile = os.path.join(self.AlterRoot, self.File) elif self.Root: RealFile = os.path.join(self.Root, self.File) - return FILE_NOT_FOUND, os.path.join(self.AlterRoot, RealFile) + if len (mws.getPkgPath()) == 0: + return FILE_NOT_FOUND, os.path.join(self.AlterRoot, RealFile) + else: + return FILE_NOT_FOUND, "%s is not found in packages path:\n\t%s" % (self.File, '\n\t'.join(mws.getPkgPath())) ErrorCode = 0 ErrorInfo = '' @@ -1825,17 +1990,26 @@ class SkuClass(): self.AvailableSkuIds = sdict() self.SkuIdSet = [] - + self.SkuIdNumberSet = [] if SkuIdentifier == '' or SkuIdentifier is None: self.SkuIdSet = ['DEFAULT'] + self.SkuIdNumberSet = ['0U'] elif SkuIdentifier == 'ALL': self.SkuIdSet = SkuIds.keys() + self.SkuIdNumberSet = [num.strip() + 'U' for num in SkuIds.values()] else: r = SkuIdentifier.split('|') self.SkuIdSet=[r[k].strip() for k in range(len(r))] + k = None + try: + self.SkuIdNumberSet = [SkuIds[k].strip() + 'U' for k in self.SkuIdSet] + except Exception: + EdkLogger.error("build", PARAMETER_INVALID, + ExtraData = "SKU-ID [%s] is not supported by the platform. 
[Valid SKU-ID: %s]" + % (k, " ".join(SkuIds.keys()))) if len(self.SkuIdSet) == 2 and 'DEFAULT' in self.SkuIdSet and SkuIdentifier != 'ALL': self.SkuIdSet.remove('DEFAULT') - + self.SkuIdNumberSet.remove('0U') for each in self.SkuIdSet: if each in SkuIds: self.AvailableSkuIds[each] = SkuIds[each] @@ -1862,10 +2036,31 @@ class SkuClass(): return self.SkuIdSet[0] else: return 'DEFAULT' - + def __GetAvailableSkuIdNumber(self): + return self.SkuIdNumberSet SystemSkuId = property(__GetSystemSkuID) AvailableSkuIdSet = property(__GetAvailableSkuIds) SkuUsageType = property(__SkuUsageType) + AvailableSkuIdNumSet = property(__GetAvailableSkuIdNumber) + +# +# Pack a registry format GUID +# +def PackRegistryFormatGuid(Guid): + Guid = Guid.split('-') + return pack('=LHHBBBBBBBB', + int(Guid[0], 16), + int(Guid[1], 16), + int(Guid[2], 16), + int(Guid[3][-4:-2], 16), + int(Guid[3][-2:], 16), + int(Guid[4][-12:-10], 16), + int(Guid[4][-10:-8], 16), + int(Guid[4][-8:-6], 16), + int(Guid[4][-6:-4], 16), + int(Guid[4][-4:-2], 16), + int(Guid[4][-2:], 16) + ) ## #