From 86e6cf98a8493574878286522078050ac4dd505d Mon Sep 17 00:00:00 2001 From: Yunhua Feng Date: Thu, 11 Oct 2018 11:20:59 +0800 Subject: [PATCH] BaseTools: Handle the bytes and str difference Deal with bytes and str is different, remove the unicode() Using utcfromtimestamp instead of fromtimestamp. Cc: Liming Gao Cc: Yonghong Zhu Contributed-under: TianoCore Contribution Agreement 1.1 Signed-off-by: Yunhua Feng Reviewed-by: Liming Gao --- BaseTools/Source/Python/AutoGen/AutoGen.py | 30 +++++++++---------- BaseTools/Source/Python/AutoGen/GenC.py | 6 ++-- BaseTools/Source/Python/AutoGen/GenPcdDb.py | 22 +++++++------- BaseTools/Source/Python/AutoGen/GenVar.py | 22 +++++++------- BaseTools/Source/Python/AutoGen/StrGather.py | 2 ++ .../Source/Python/AutoGen/UniClassObject.py | 14 +-------- .../Python/AutoGen/ValidCheckingInfoObject.py | 2 +- BaseTools/Source/Python/BPDG/GenVpd.py | 7 ++--- .../Source/Python/Common/LongFilePathOs.py | 3 +- .../Python/Common/LongFilePathSupport.py | 14 +-------- BaseTools/Source/Python/Common/Misc.py | 9 ++---- BaseTools/Source/Python/Common/StringUtils.py | 10 ++----- BaseTools/Source/Python/Common/VpdInfoFile.py | 10 +++---- .../Source/Python/GenFds/AprioriSection.py | 2 +- BaseTools/Source/Python/GenFds/Capsule.py | 5 ++-- BaseTools/Source/Python/GenFds/CapsuleData.py | 2 +- BaseTools/Source/Python/GenFds/Fd.py | 4 +-- .../Source/Python/GenFds/FfsFileStatement.py | 14 ++++----- .../Source/Python/GenFds/FfsInfStatement.py | 8 ++--- BaseTools/Source/Python/GenFds/Fv.py | 9 +++--- .../Source/Python/GenFds/FvImageSection.py | 4 +-- BaseTools/Source/Python/GenFds/GenFds.py | 5 ++-- .../Python/GenFds/GenFdsGlobalVariable.py | 4 +-- BaseTools/Source/Python/GenFds/Region.py | 6 ++-- .../Source/Python/Pkcs7Sign/Pkcs7Sign.py | 2 +- .../Rsa2048Sha256GenerateKeys.py | 14 ++++----- .../Rsa2048Sha256Sign/Rsa2048Sha256Sign.py | 11 +++---- BaseTools/Source/Python/Trim/Trim.py | 9 ++---- .../Source/Python/UPT/Library/StringUtils.py | 4 +-- .../Source/Python/Workspace/DscBuildData.py | 13 ++++++-- .../Source/Python/Workspace/MetaFileParser.py | 4 +-- BaseTools/Source/Python/build/BuildReport.py | 10 +++---- BaseTools/Source/Python/build/build.py | 12 ++++---- 33 files changed, 131 insertions(+), 162 deletions(-) diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py index 2815091de8..1e5f86937d 100644 --- a/BaseTools/Source/Python/AutoGen/AutoGen.py +++ b/BaseTools/Source/Python/AutoGen/AutoGen.py @@ -661,7 +661,7 @@ class WorkspaceAutoGen(AutoGen): for files in AllWorkSpaceMetaFiles: if files.endswith('.dec'): continue - f = open(files, 'r') + f = open(files, 'rb') Content = f.read() f.close() m.update(Content) @@ -690,7 +690,7 @@ class WorkspaceAutoGen(AutoGen): HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash') m = hashlib.md5() # Get .dec file's hash value - f = open(Pkg.MetaFile.Path, 'r') + f = open(Pkg.MetaFile.Path, 'rb') Content = f.read() f.close() m.update(Content) @@ -700,7 +700,7 @@ class WorkspaceAutoGen(AutoGen): for Root, Dirs, Files in os.walk(str(inc)): for File in sorted(Files): File_Path = os.path.join(Root, File) - f = open(File_Path, 'r') + f = open(File_Path, 'rb') Content = f.read() f.close() m.update(Content) @@ -1602,7 +1602,7 @@ class PlatformAutoGen(AutoGen): for pcd in self._DynamicPcdList: if len(pcd.SkuInfoList) == 1: for (SkuName, SkuId) in allskuset: - if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0: + if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0: continue 
pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT]) pcd.SkuInfoList[SkuName].SkuId = SkuId @@ -3207,7 +3207,7 @@ class ModuleAutoGen(AutoGen): AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir) RetVal[AutoFile] = str(StringH) self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE) - if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "": + if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"": AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir) RetVal[AutoFile] = UniStringBinBuffer.getvalue() AutoFile.IsBinary = True @@ -3218,7 +3218,7 @@ class ModuleAutoGen(AutoGen): AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir) RetVal[AutoFile] = str(StringIdf) self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE) - if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "": + if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"": AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir) RetVal[AutoFile] = IdfGenBinBuffer.getvalue() AutoFile.IsBinary = True @@ -3449,7 +3449,7 @@ class ModuleAutoGen(AutoGen): EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None) # Use a instance of BytesIO to cache data - fStringIO = BytesIO('') + fStringIO = BytesIO() for Item in VfrUniOffsetList: if (Item[0].find("Strings") != -1): @@ -3459,8 +3459,7 @@ class ModuleAutoGen(AutoGen): # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } } # UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66] - UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid] - fStringIO.write(''.join(UniGuid)) + fStringIO.write(bytes(UniGuid)) UniValue = pack ('Q', int (Item[1], 16)) fStringIO.write (UniValue) else: @@ -3470,8 +3469,7 @@ class ModuleAutoGen(AutoGen): # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } }; # VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2] - VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid] - fStringIO.write(''.join(VfrGuid)) + fStringIO.write(bytes(VfrGuid)) VfrValue = pack ('Q', int (Item[1], 16)) fStringIO.write (VfrValue) # @@ -4019,29 +4017,29 @@ class ModuleAutoGen(AutoGen): GlobalData.gModuleHash[self.Arch] = {} m = hashlib.md5() # Add Platform level hash - m.update(GlobalData.gPlatformHash) + m.update(GlobalData.gPlatformHash.encode('utf-8')) # Add Package level hash if self.DependentPackageList: for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName): if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]: - m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName]) + m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName].encode('utf-8')) # Add Library hash if self.LibraryAutoGenList: for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name): if Lib.Name not in GlobalData.gModuleHash[self.Arch]: Lib.GenModuleHash() - m.update(GlobalData.gModuleHash[self.Arch][Lib.Name]) + m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8')) # Add Module self - f = open(str(self.MetaFile), 'r') + f = open(str(self.MetaFile), 'rb') Content = f.read() f.close() m.update(Content) # Add Module's source files if self.SourceFileList: for File in sorted(self.SourceFileList, key=lambda x: str(x)): - f = open(str(File), 'r') + f = open(str(File), 'rb') Content = 
f.read() f.close() m.update(Content) diff --git a/BaseTools/Source/Python/AutoGen/GenC.py b/BaseTools/Source/Python/AutoGen/GenC.py index c189269d4d..28539d8239 100644 --- a/BaseTools/Source/Python/AutoGen/GenC.py +++ b/BaseTools/Source/Python/AutoGen/GenC.py @@ -1795,7 +1795,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer): TempBuffer += Buffer elif File.Ext.upper() == '.JPG': ImageType, = struct.unpack('4s', Buffer[6:10]) - if ImageType != 'JFIF': + if ImageType != b'JFIF': EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path) TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG) TempBuffer += pack('I', len(Buffer)) @@ -1895,7 +1895,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer): def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent): ImageType, = struct.unpack('2s', Buffer[0:2]) - if ImageType!= 'BM': # BMP file type is 'BM' + if ImageType!= b'BM': # BMP file type is 'BM' EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path) BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant']) BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII') @@ -1967,7 +1967,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent): for Index in range(0, len(PaletteBuffer)): if Index % 4 == 3: continue - PaletteTemp += PaletteBuffer[Index] + PaletteTemp += bytes([PaletteBuffer[Index]]) PaletteBuffer = PaletteTemp[1:] return ImageBuffer, PaletteBuffer diff --git a/BaseTools/Source/Python/AutoGen/GenPcdDb.py b/BaseTools/Source/Python/AutoGen/GenPcdDb.py index f99d9f5d18..26017c6544 100644 --- a/BaseTools/Source/Python/AutoGen/GenPcdDb.py +++ b/BaseTools/Source/Python/AutoGen/GenPcdDb.py @@ -291,7 +291,7 @@ class DbItemList: PackStr = PACK_CODE_BY_SIZE[self.ItemSize] - Buffer = '' + Buffer = bytearray() for Datas in self.RawDataList: if type(Datas) in (list, tuple): for Data in Datas: @@ -316,7 +316,7 @@ class DbExMapTblItemList (DbItemList): DbItemList.__init__(self, ItemSize, DataList, RawDataList) def PackData(self): - Buffer = '' + Buffer = bytearray() PackStr = "=LHH" for Datas in self.RawDataList: Buffer += pack(PackStr, @@ -365,7 +365,7 @@ class DbComItemList (DbItemList): def PackData(self): PackStr = PACK_CODE_BY_SIZE[self.ItemSize] - Buffer = '' + Buffer = bytearray() for DataList in self.RawDataList: for Data in DataList: if type(Data) in (list, tuple): @@ -386,7 +386,7 @@ class DbVariableTableItemList (DbComItemList): def PackData(self): PackStr = "=LLHHLHH" - Buffer = '' + Buffer = bytearray() for DataList in self.RawDataList: for Data in DataList: Buffer += pack(PackStr, @@ -447,7 +447,7 @@ class DbSkuHeadTableItemList (DbItemList): def PackData(self): PackStr = "=LL" - Buffer = '' + Buffer = bytearray() for Data in self.RawDataList: Buffer += pack(PackStr, GetIntegerValue(Data[0]), @@ -469,7 +469,7 @@ class DbSizeTableItemList (DbItemList): return length * self.ItemSize def PackData(self): PackStr = "=H" - Buffer = '' + Buffer = bytearray() for Data in self.RawDataList: Buffer += pack(PackStr, GetIntegerValue(Data[0])) @@ -849,7 +849,7 @@ def BuildExDataBase(Dict): Index = 0 for Item in DbItemTotal: Index +=1 - b = Item.PackData() + b = bytes(Item.PackData()) Buffer += b if Index == InitTableNum: if len(Buffer) % 
8: @@ -917,9 +917,9 @@ def CreatePcdDataBase(PcdDBData): totallenbuff = pack("=L", totallen) newbuffer = databasebuff[:32] for i in range(4): - newbuffer += totallenbuff[i] + newbuffer += bytes([totallenbuff[i]]) for i in range(36, totallen): - newbuffer += databasebuff[i] + newbuffer += bytes([databasebuff[i]]) return newbuffer @@ -962,7 +962,7 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase): AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase) final_data = () for item in PcdDbBuffer: - final_data += unpack("B", item) + final_data += unpack("B", bytes([item])) PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data) PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC) VarCheckTableData[(skuname, skuid)] = VarCheckTab @@ -975,7 +975,7 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase): AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase) final_data = () for item in PcdDbBuffer: - final_data += unpack("B", item) + final_data += unpack("B", bytes([item])) PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data) return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData) diff --git a/BaseTools/Source/Python/AutoGen/GenVar.py b/BaseTools/Source/Python/AutoGen/GenVar.py index f7806b1dd3..d06fbe7e17 100644 --- a/BaseTools/Source/Python/AutoGen/GenVar.py +++ b/BaseTools/Source/Python/AutoGen/GenVar.py @@ -66,7 +66,7 @@ class VariableMgr(object): data = value_list[0] value_list = [] for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)): - value_list.append(hex(unpack("B", data_byte)[0])) + value_list.append(hex(unpack("B", bytes([data_byte]))[0])) newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = value_list try: newvaluestr = "{" + ",".join(VariableMgr.assemble_variable(newvalue)) +"}" @@ -87,7 +87,7 @@ class VariableMgr(object): data = value_list[0] value_list = [] for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)): - value_list.append(hex(unpack("B", data_byte)[0])) + value_list.append(hex(unpack("B", bytes([data_byte]))[0])) newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = (value_list,item.pcdname,item.PcdDscLine) for offset in newvalue: value_list,itemPcdname,itemPcdDscLine = newvalue[offset] @@ -161,7 +161,7 @@ class VariableMgr(object): default_data_array = () for item in default_data_buffer: - default_data_array += unpack("B", item) + default_data_array += unpack("B", bytes([item])) var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)]) @@ -179,7 +179,7 @@ class VariableMgr(object): others_data_array = () for item in others_data_buffer: - others_data_array += unpack("B", item) + others_data_array += unpack("B", bytes([item])) data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array) @@ -195,7 +195,7 @@ class VariableMgr(object): return [] pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {}) - NvStoreDataBuffer = "" + NvStoreDataBuffer = bytearray() var_data_offset = collections.OrderedDict() offset = NvStorageHeaderSize for default_data, default_info in 
pcds_default_data.values(): @@ -222,7 +222,7 @@ class VariableMgr(object): nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8) - data_delta_structure_buffer = "" + data_delta_structure_buffer = bytearray() for skuname, defaultstore in var_data: if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT): continue @@ -254,7 +254,7 @@ class VariableMgr(object): def unpack_data(data): final_data = () for item in data: - final_data += unpack("B", item) + final_data += unpack("B", bytes([item])) return final_data @staticmethod @@ -322,7 +322,7 @@ class VariableMgr(object): @staticmethod def PACK_VARIABLES_DATA(var_value,data_type, tail = None): - Buffer = "" + Buffer = bytearray() data_len = 0 if data_type == DataType.TAB_VOID: for value_char in var_value.strip("{").strip("}").split(","): @@ -352,7 +352,7 @@ class VariableMgr(object): @staticmethod def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value): - Buffer = "" + Buffer = bytearray() Buffer += pack("=L", 4+8+8) Buffer += pack("=Q", int(skuid)) Buffer += pack("=Q", int(defaultstoragename)) @@ -377,7 +377,7 @@ class VariableMgr(object): def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list): skuid = self.GetSkuId(skuname) defaultstorageid = self.GetDefaultStoreId(defaultstoragename) - Buffer = "" + Buffer = bytearray() Buffer += pack("=L", 4+8+8) Buffer += pack("=Q", int(skuid)) Buffer += pack("=Q", int(defaultstorageid)) @@ -400,7 +400,7 @@ class VariableMgr(object): @staticmethod def PACK_VARIABLE_NAME(var_name): - Buffer = "" + Buffer = bytearray() for name_char in var_name.strip("{").strip("}").split(","): Buffer += pack("=B", int(name_char, 16)) diff --git a/BaseTools/Source/Python/AutoGen/StrGather.py b/BaseTools/Source/Python/AutoGen/StrGather.py index 3f384db1df..05e670ac06 100644 --- a/BaseTools/Source/Python/AutoGen/StrGather.py +++ b/BaseTools/Source/Python/AutoGen/StrGather.py @@ -122,6 +122,8 @@ def DecToHexList(Dec, Digit = 8): # @retval: A list for formatted hex string # def AscToHexList(Ascii): + if isinstance(Ascii, bytes): + return ['0x{0:02X}'.format(Item) for Item in Ascii] return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii] ## Create content of .h file diff --git a/BaseTools/Source/Python/AutoGen/UniClassObject.py b/BaseTools/Source/Python/AutoGen/UniClassObject.py index b435ffa569..1c86be4429 100644 --- a/BaseTools/Source/Python/AutoGen/UniClassObject.py +++ b/BaseTools/Source/Python/AutoGen/UniClassObject.py @@ -45,18 +45,6 @@ BACK_SLASH_PLACEHOLDER = u'\u0006' gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE) -## Convert a python unicode string to a normal string -# -# Convert a python unicode string to a normal string -# UniToStr(u'I am a string') is 'I am a string' -# -# @param Uni: The python unicode string -# -# @retval: The formatted normal string -# -def UniToStr(Uni): - return repr(Uni)[2:-1] - ## Convert a unicode string to a Hex list # # Convert a unicode string to a Hex list @@ -438,7 +426,7 @@ class UniFileClassObject(object): if EndPos != -1 and EndPos - StartPos == 6 : if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE): EndStr = Line[EndPos: ] - UniStr = ('\u' + (Line[StartPos + 2 : EndPos])).decode('unicode_escape') + UniStr = Line[StartPos + 2: EndPos] if EndStr.startswith(u'\\x') and len(EndStr) >= 7: if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE): Line = Line[0 : StartPos] 
+ UniStr + EndStr diff --git a/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py b/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py index 379d404c76..77518fc1cc 100644 --- a/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py +++ b/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py @@ -41,7 +41,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object): os.mkdir(dest) BinFileName = "PcdVarCheck.bin" BinFilePath = os.path.join(dest, BinFileName) - Buffer = '' + Buffer = bytearray() index = 0 for var_check_tab in self.var_check_info: index += 1 diff --git a/BaseTools/Source/Python/BPDG/GenVpd.py b/BaseTools/Source/Python/BPDG/GenVpd.py index bd2c05d782..28bfde1151 100644 --- a/BaseTools/Source/Python/BPDG/GenVpd.py +++ b/BaseTools/Source/Python/BPDG/GenVpd.py @@ -185,7 +185,7 @@ class PcdEntry: EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW, "PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno)) try: - self.PcdValue = pack('%ds' % Size, ValueString) + self.PcdValue = pack('%ds' % Size, bytes(ValueString, 'utf-8')) except: EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno)) @@ -656,7 +656,7 @@ class GenVPD : EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None) # Use a instance of BytesIO to cache data - fStringIO = BytesIO('') + fStringIO = BytesIO() # Write the header of map file. try : @@ -674,8 +674,7 @@ class GenVPD : # Write Vpd binary file fStringIO.seek (eachPcd.PcdBinOffset) if isinstance(eachPcd.PcdValue, list): - ValueList = [chr(Item) for Item in eachPcd.PcdValue] - fStringIO.write(''.join(ValueList)) + fStringIO.write(bytes(eachPcd.PcdValue)) else: fStringIO.write (eachPcd.PcdValue) diff --git a/BaseTools/Source/Python/Common/LongFilePathOs.py b/BaseTools/Source/Python/Common/LongFilePathOs.py index 5e409e2030..12796b2682 100644 --- a/BaseTools/Source/Python/Common/LongFilePathOs.py +++ b/BaseTools/Source/Python/Common/LongFilePathOs.py @@ -14,7 +14,6 @@ import os from . import LongFilePathOsPath from Common.LongFilePathSupport import LongFilePath -from Common.LongFilePathSupport import UniToStr import time path = LongFilePathOsPath @@ -63,7 +62,7 @@ def listdir(path): List = [] uList = os.listdir(u"%s" % LongFilePath(path)) for Item in uList: - List.append(UniToStr(Item)) + List.append(Item) return List environ = os.environ diff --git a/BaseTools/Source/Python/Common/LongFilePathSupport.py b/BaseTools/Source/Python/Common/LongFilePathSupport.py index b3e3c8ea64..7eb141053a 100644 --- a/BaseTools/Source/Python/Common/LongFilePathSupport.py +++ b/BaseTools/Source/Python/Common/LongFilePathSupport.py @@ -1,7 +1,7 @@ ## @file # Override built in function file.open to provide support for long file path # -# Copyright (c) 2014 - 2015, Intel Corporation. All rights reserved.
+# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.
# This program and the accompanying materials # are licensed and made available under the terms and conditions of the BSD License # which accompanies this distribution. The full text of the license may be found at @@ -49,15 +49,3 @@ def CopyLongFilePath(src, dst): with open(LongFilePath(src), 'rb') as fsrc: with open(LongFilePath(dst), 'wb') as fdst: shutil.copyfileobj(fsrc, fdst) - -## Convert a python unicode string to a normal string -# -# Convert a python unicode string to a normal string -# UniToStr(u'I am a string') is 'I am a string' -# -# @param Uni: The python unicode string -# -# @retval: The formatted normal string -# -def UniToStr(Uni): - return repr(Uni)[2:-1] diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py index 1d62a8b56b..2253b67af6 100644 --- a/BaseTools/Source/Python/Common/Misc.py +++ b/BaseTools/Source/Python/Common/Misc.py @@ -454,9 +454,6 @@ def RemoveDirectory(Directory, Recursively=False): # @retval False If the file content is the same # def SaveFileOnChange(File, Content, IsBinaryFile=True): - if not IsBinaryFile: - Content = Content.replace("\n", os.linesep) - if os.path.exists(File): try: if isinstance(Content, bytes): @@ -1308,7 +1305,7 @@ def ParseDevPathValue (Value): if err: raise BadExpression("DevicePath: %s" % str(err)) Size = len(out.split()) - out = ','.join(out.split()) + out = ','.join(out.decode(encoding='utf-8', errors='ignore').split()) return '{' + out + '}', Size def ParseFieldValue (Value): @@ -1347,7 +1344,7 @@ def ParseFieldValue (Value): if Value[0] == '"' and Value[-1] == '"': Value = Value[1:-1] try: - Value = "'" + uuid.UUID(Value).get_bytes_le() + "'" + Value = "{" + ','.join([str(i) for i in uuid.UUID(Value).bytes_le]) + "}" except ValueError as Message: raise BadExpression(Message) Value, Size = ParseFieldValue(Value) @@ -1871,7 +1868,7 @@ class PeImageClass(): ByteArray = array.array('B') ByteArray.fromfile(PeObject, 4) # PE signature should be 'PE\0\0' - if ByteArray.tostring() != 'PE\0\0': + if ByteArray.tostring() != b'PE\0\0': self.ErrorInfo = self.FileName + ' has no valid PE signature PE00' return diff --git a/BaseTools/Source/Python/Common/StringUtils.py b/BaseTools/Source/Python/Common/StringUtils.py index 005dbd09bb..f667c4c916 100644 --- a/BaseTools/Source/Python/Common/StringUtils.py +++ b/BaseTools/Source/Python/Common/StringUtils.py @@ -815,11 +815,7 @@ def GetHelpTextList(HelpTextClassList): return List def StringToArray(String): - if isinstance(String, unicode): - if len(unicode) == 0: - return "{0x00,0x00}" - return "{%s,0x00,0x00}" % ",".join("0x%02x,0x00" % ord(C) for C in String) - elif String.startswith('L"'): + if String.startswith('L"'): if String == "L\"\"": return "{0x00,0x00}" else: @@ -842,9 +838,7 @@ def StringToArray(String): return '{%s,0,0}' % ','.join(String.split()) def StringArrayLength(String): - if isinstance(String, unicode): - return (len(String) + 1) * 2 + 1; - elif String.startswith('L"'): + if String.startswith('L"'): return (len(String) - 3 + 1) * 2 elif String.startswith('"'): return (len(String) - 2 + 1) diff --git a/BaseTools/Source/Python/Common/VpdInfoFile.py b/BaseTools/Source/Python/Common/VpdInfoFile.py index bae184f040..3fadd693ea 100644 --- a/BaseTools/Source/Python/Common/VpdInfoFile.py +++ b/BaseTools/Source/Python/Common/VpdInfoFile.py @@ -91,18 +91,18 @@ class VpdInfoFile: if (Vpd is None): EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.") - if not (Offset >= 0 or Offset == "*"): + if not 
(Offset >= "0" or Offset == "*"): EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset) if Vpd.DatumType == TAB_VOID: - if Vpd.MaxDatumSize <= 0: + if Vpd.MaxDatumSize <= "0": EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName)) elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES: if not Vpd.MaxDatumSize: Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType] else: - if Vpd.MaxDatumSize <= 0: + if Vpd.MaxDatumSize <= "0": EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName)) @@ -126,7 +126,7 @@ class VpdInfoFile: "Invalid parameter FilePath: %s." % FilePath) Content = FILE_COMMENT_TEMPLATE - Pcds = sorted(self._VpdArray.keys()) + Pcds = sorted(self._VpdArray.keys(), key=lambda x: x.TokenCName) for Pcd in Pcds: i = 0 PcdTokenCName = Pcd.TokenCName @@ -248,7 +248,7 @@ def CallExtenalBPDGTool(ToolPath, VpdFileName): except Exception as X: EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData=str(X)) (out, error) = PopenObject.communicate() - print(out) + print(out.decode(encoding='utf-8', errors='ignore')) while PopenObject.returncode is None : PopenObject.wait() diff --git a/BaseTools/Source/Python/GenFds/AprioriSection.py b/BaseTools/Source/Python/GenFds/AprioriSection.py index c50a0c5b02..13142ef7c0 100644 --- a/BaseTools/Source/Python/GenFds/AprioriSection.py +++ b/BaseTools/Source/Python/GenFds/AprioriSection.py @@ -51,7 +51,7 @@ class AprioriSection (AprioriSectionClassObject): def GenFfs (self, FvName, Dict = {}, IsMakefile = False): DXE_GUID = "FC510EE7-FFDC-11D4-BD41-0080C73C8881" PEI_GUID = "1B45CC0A-156A-428A-AF62-49864DA0E6E6" - Buffer = BytesIO('') + Buffer = BytesIO() AprioriFileGuid = DXE_GUID if self.AprioriType == "PEI": AprioriFileGuid = PEI_GUID diff --git a/BaseTools/Source/Python/GenFds/Capsule.py b/BaseTools/Source/Python/GenFds/Capsule.py index 238cb4e918..1c673949fa 100644 --- a/BaseTools/Source/Python/GenFds/Capsule.py +++ b/BaseTools/Source/Python/GenFds/Capsule.py @@ -21,6 +21,7 @@ from CommonDataClass.FdfClass import CapsuleClassObject import Common.LongFilePathOs as os import subprocess from io import BytesIO +from io import StringIO from Common.Misc import SaveFileOnChange from Common.Misc import PackRegistryFormatGuid import uuid @@ -184,7 +185,7 @@ class Capsule (CapsuleClassObject) : # # The real capsule header structure is 28 bytes # - Header.write('\x00'*(HdrSize-28)) + Header.write(b'\x00'*(HdrSize-28)) Header.write(FwMgrHdr.getvalue()) Header.write(Content.getvalue()) # @@ -246,7 +247,7 @@ class Capsule (CapsuleClassObject) : def GenCapInf(self): self.CapInfFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiCapsuleName + "_Cap" + '.inf') - CapInfFile = BytesIO() #open (self.CapInfFileName , 'w+') + CapInfFile = StringIO() #open (self.CapInfFileName , 'w+') CapInfFile.writelines("[options]" + T_CHAR_LF) diff --git a/BaseTools/Source/Python/GenFds/CapsuleData.py b/BaseTools/Source/Python/GenFds/CapsuleData.py index 1a592189b3..651a16ec75 100644 --- a/BaseTools/Source/Python/GenFds/CapsuleData.py +++ b/BaseTools/Source/Python/GenFds/CapsuleData.py @@ -82,7 +82,7 @@ class CapsuleFv (CapsuleData): if self.FvName.find('.fv') == -1: if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict: FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[self.FvName.upper()] - FdBuffer = BytesIO('') + 
FdBuffer = BytesIO() FvObj.CapsuleName = self.CapsuleName FvFile = FvObj.AddToBuffer(FdBuffer) FvObj.CapsuleName = None diff --git a/BaseTools/Source/Python/GenFds/Fd.py b/BaseTools/Source/Python/GenFds/Fd.py index bb58928afa..0ba6decc9e 100644 --- a/BaseTools/Source/Python/GenFds/Fd.py +++ b/BaseTools/Source/Python/GenFds/Fd.py @@ -74,7 +74,7 @@ class FD(FDClassObject): HasCapsuleRegion = True break if HasCapsuleRegion: - TempFdBuffer = BytesIO('') + TempFdBuffer = BytesIO() PreviousRegionStart = -1 PreviousRegionSize = 1 @@ -103,7 +103,7 @@ class FD(FDClassObject): GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function') RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.vtfRawDict, self.DefineVarDict) - FdBuffer = BytesIO('') + FdBuffer = BytesIO() PreviousRegionStart = -1 PreviousRegionSize = 1 for RegionObj in self.RegionList : diff --git a/BaseTools/Source/Python/GenFds/FfsFileStatement.py b/BaseTools/Source/Python/GenFds/FfsFileStatement.py index 558a3f75c0..3daf75b205 100644 --- a/BaseTools/Source/Python/GenFds/FfsFileStatement.py +++ b/BaseTools/Source/Python/GenFds/FfsFileStatement.py @@ -82,7 +82,7 @@ class FileStatement (FileStatementClassObject) : Dict.update(self.DefineVarDict) SectionAlignments = None if self.FvName is not None : - Buffer = BytesIO('') + Buffer = BytesIO() if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict: EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName)) Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper()) @@ -99,7 +99,7 @@ class FileStatement (FileStatementClassObject) : elif self.FileName is not None: if hasattr(self, 'FvFileType') and self.FvFileType == 'RAW': if isinstance(self.FileName, list) and isinstance(self.SubAlignment, list) and len(self.FileName) == len(self.SubAlignment): - FileContent = '' + FileContent = BytesIO() MaxAlignIndex = 0 MaxAlignValue = 1 for Index, File in enumerate(self.FileName): @@ -115,15 +115,15 @@ class FileStatement (FileStatementClassObject) : if AlignValue > MaxAlignValue: MaxAlignIndex = Index MaxAlignValue = AlignValue - FileContent += Content - if len(FileContent) % AlignValue != 0: + FileContent.write(Content) + if len(FileContent.getvalue()) % AlignValue != 0: Size = AlignValue - len(FileContent) % AlignValue for i in range(0, Size): - FileContent += pack('B', 0xFF) + FileContent.write(pack('B', 0xFF)) - if FileContent: + if FileContent.getvalue() != b'': OutputRAWFile = os.path.join(GenFdsGlobalVariable.FfsDir, self.NameGuid, self.NameGuid + '.raw') - SaveFileOnChange(OutputRAWFile, FileContent, True) + SaveFileOnChange(OutputRAWFile, FileContent.getvalue(), True) self.FileName = OutputRAWFile self.SubAlignment = self.SubAlignment[MaxAlignIndex] diff --git a/BaseTools/Source/Python/GenFds/FfsInfStatement.py b/BaseTools/Source/Python/GenFds/FfsInfStatement.py index 43b9c90b77..016ee0b9a2 100644 --- a/BaseTools/Source/Python/GenFds/FfsInfStatement.py +++ b/BaseTools/Source/Python/GenFds/FfsInfStatement.py @@ -1086,7 +1086,7 @@ class FfsInfStatement(FfsInfStatementClassObject): def __GenUniVfrOffsetFile(VfrUniOffsetList, UniVfrOffsetFileName): # Use a instance of StringIO to cache data - fStringIO = BytesIO('') + fStringIO = BytesIO() for Item in VfrUniOffsetList: if (Item[0].find("Strings") != -1): @@ -1096,8 +1096,7 @@ class FfsInfStatement(FfsInfStatementClassObject): # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 
0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } } # UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66] - UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid] - fStringIO.write(''.join(UniGuid)) + fStringIO.write(bytes(UniGuid)) UniValue = pack ('Q', int (Item[1], 16)) fStringIO.write (UniValue) else: @@ -1107,8 +1106,7 @@ class FfsInfStatement(FfsInfStatementClassObject): # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } }; # VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2] - VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid] - fStringIO.write(''.join(VfrGuid)) + fStringIO.write(bytes(VfrGuid)) type (Item[1]) VfrValue = pack ('Q', int (Item[1], 16)) fStringIO.write (VfrValue) diff --git a/BaseTools/Source/Python/GenFds/Fv.py b/BaseTools/Source/Python/GenFds/Fv.py index 2bca6d7341..7653cf692b 100644 --- a/BaseTools/Source/Python/GenFds/Fv.py +++ b/BaseTools/Source/Python/GenFds/Fv.py @@ -18,6 +18,7 @@ import Common.LongFilePathOs as os import subprocess from io import BytesIO +from io import StringIO from struct import * from . import Ffs @@ -204,7 +205,7 @@ class FV (FvClassObject): # PI FvHeader is 0x48 byte FvHeaderBuffer = FvFileObj.read(0x48) # FV alignment position. - FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E]) & 0x1F) + FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F) if FvAlignmentValue >= 0x400: if FvAlignmentValue >= 0x100000: if FvAlignmentValue >= 0x1000000: @@ -264,7 +265,7 @@ class FV (FvClassObject): # self.InfFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName + '.inf') - self.FvInfFile = BytesIO() + self.FvInfFile = StringIO() # # Add [Options] @@ -339,7 +340,7 @@ class FV (FvClassObject): GenFdsGlobalVariable.ErrorLogger("FV Extension Header Entries declared for %s with no FvNameGuid declaration." % (self.UiFvName)) else: TotalSize = 16 + 4 - Buffer = '' + Buffer = bytearray() if self.UsedSizeEnable: TotalSize += (4 + 4) ## define EFI_FV_EXT_TYPE_USED_SIZE_TYPE 0x03 @@ -366,7 +367,7 @@ class FV (FvClassObject): # Buffer += (pack('HH', (FvUiLen + 16 + 4), 0x0002) + PackGUID(Guid) - + self.UiFvName) + + bytes(self.UiFvName, 'utf-8')) for Index in range (0, len(self.FvExtEntryType)): if self.FvExtEntryType[Index] == 'FILE': diff --git a/BaseTools/Source/Python/GenFds/FvImageSection.py b/BaseTools/Source/Python/GenFds/FvImageSection.py index 3278a8befa..5f1b42b078 100644 --- a/BaseTools/Source/Python/GenFds/FvImageSection.py +++ b/BaseTools/Source/Python/GenFds/FvImageSection.py @@ -98,7 +98,7 @@ class FvImageSection(FvImageSectionClassObject): # Generate Fv # if self.FvName is not None: - Buffer = BytesIO('') + Buffer = BytesIO() Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName) if Fv is not None: self.Fv = Fv @@ -118,7 +118,7 @@ class FvImageSection(FvImageSectionClassObject): # PI FvHeader is 0x48 byte FvHeaderBuffer = FvFileObj.read(0x48) # FV alignment position. - FvAlignmentValue = 1 << (ord (FvHeaderBuffer[0x2E]) & 0x1F) + FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F) # FvAlignmentValue is larger than or equal to 1K if FvAlignmentValue >= 0x400: if FvAlignmentValue >= 0x100000: diff --git a/BaseTools/Source/Python/GenFds/GenFds.py b/BaseTools/Source/Python/GenFds/GenFds.py index 904bd84089..e135139bc1 100644 --- a/BaseTools/Source/Python/GenFds/GenFds.py +++ b/BaseTools/Source/Python/GenFds/GenFds.py @@ -27,6 +27,7 @@ from Workspace.BuildClassObject import PcdClassObject from . 
import RuleComplexFile from .EfiSection import EfiSection from io import BytesIO +from io import StringIO import Common.TargetTxtClassObject as TargetTxtClassObject import Common.ToolDefClassObject as ToolDefClassObject from Common.DataType import * @@ -454,7 +455,7 @@ class GenFds : return elif GenFds.OnlyGenerateThisFv is None: for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict.values(): - Buffer = BytesIO('') + Buffer = BytesIO() FvObj.AddToBuffer(Buffer) Buffer.close() @@ -600,7 +601,7 @@ class GenFds : def GenerateGuidXRefFile(BuildDb, ArchList, FdfParserObj): GuidXRefFileName = os.path.join(GenFdsGlobalVariable.FvDir, "Guid.xref") - GuidXRefFile = BytesIO('') + GuidXRefFile = StringIO('') PkgGuidDict = {} GuidDict = {} ModuleList = [] diff --git a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py index 1f3ca10779..052736b9d8 100644 --- a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py +++ b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py @@ -720,8 +720,8 @@ class GenFdsGlobalVariable: return if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1: GenFdsGlobalVariable.InfLogger ("Return Value = %d" % PopenObject.returncode) - GenFdsGlobalVariable.InfLogger (out) - GenFdsGlobalVariable.InfLogger (error) + GenFdsGlobalVariable.InfLogger (out.decode(encoding='utf-8',errors='ignore')) + GenFdsGlobalVariable.InfLogger (error.decode(encoding='utf-8', errors='ignore')) if PopenObject.returncode != 0: print("###", cmd) EdkLogger.error("GenFds", COMMAND_FAILURE, errorMess) diff --git a/BaseTools/Source/Python/GenFds/Region.py b/BaseTools/Source/Python/GenFds/Region.py index 61c9e182a8..cd81c6df68 100644 --- a/BaseTools/Source/Python/GenFds/Region.py +++ b/BaseTools/Source/Python/GenFds/Region.py @@ -57,8 +57,8 @@ class Region(RegionClassObject): PadByte = pack('B', 0xFF) else: PadByte = pack('B', 0) - PadData = ''.join(PadByte for i in range(0, Size)) - Buffer.write(PadData) + for i in range(0, Size): + Buffer.write(PadByte) ## AddToBuffer() # @@ -127,7 +127,7 @@ class Region(RegionClassObject): if self.FvAddress % FvAlignValue != 0: EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment)) - FvBuffer = BytesIO('') + FvBuffer = BytesIO() FvBaseAddress = '0x%X' % self.FvAddress BlockSize = None BlockNum = None diff --git a/BaseTools/Source/Python/Pkcs7Sign/Pkcs7Sign.py b/BaseTools/Source/Python/Pkcs7Sign/Pkcs7Sign.py index c48425d0cd..2fae8e5772 100644 --- a/BaseTools/Source/Python/Pkcs7Sign/Pkcs7Sign.py +++ b/BaseTools/Source/Python/Pkcs7Sign/Pkcs7Sign.py @@ -120,7 +120,7 @@ if __name__ == '__main__': if Process.returncode != 0: print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH') sys.exit(Process.returncode) - print(Version[0]) + print(Version[0].decode()) # # Read input file into a buffer and save input filename diff --git a/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py b/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py index 1be360f743..e49e819c9a 100644 --- a/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py +++ b/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py @@ -82,7 +82,7 @@ if __name__ == '__main__': if Process.returncode != 0: print('ERROR: Open SSL command not available. 
Please verify PATH or set OPENSSL_PATH') sys.exit(Process.returncode) - print(Version[0]) + print(Version[0].decode()) args.PemFileName = [] @@ -117,19 +117,19 @@ if __name__ == '__main__': args.PemFileName.append(Item.name) Item.close() - PublicKeyHash = '' + PublicKeyHash = bytearray() for Item in args.PemFileName: # # Extract public key from private key into STDOUT # Process = subprocess.Popen('%s rsa -in %s -modulus -noout' % (OpenSslCommand, Item), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) - PublicKeyHexString = Process.communicate()[0].split('=')[1].strip() + PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip() if Process.returncode != 0: print('ERROR: Unable to extract public key from private key') sys.exit(Process.returncode) - PublicKey = '' + PublicKey = bytearray() for Index in range (0, len(PublicKeyHexString), 2): - PublicKey = PublicKey + chr(int(PublicKeyHexString[Index:Index + 2], 16)) + PublicKey = PublicKey + PublicKeyHexString[Index:Index + 2] # # Generate SHA 256 hash of RSA 2048 bit public key into STDOUT @@ -155,14 +155,14 @@ if __name__ == '__main__': # PublicKeyHashC = '{' for Item in PublicKeyHash: - PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (ord(Item)) + PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (Item) PublicKeyHashC = PublicKeyHashC[:-2] + '}' # # Write SHA 256 of 2048 bit binary public key to public key hash C structure file # try: - args.PublicKeyHashCFile.write (PublicKeyHashC) + args.PublicKeyHashCFile.write (bytes(PublicKeyHashC)) args.PublicKeyHashCFile.close () except: pass diff --git a/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py b/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py index 2856359631..be5ebac280 100644 --- a/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py +++ b/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py @@ -103,7 +103,7 @@ if __name__ == '__main__': if Process.returncode != 0: print('ERROR: Open SSL command not available. 
Please verify PATH or set OPENSSL_PATH') sys.exit(Process.returncode) - print(Version[0]) + print(Version[0].decode()) # # Read input file into a buffer and save input filename @@ -151,10 +151,11 @@ if __name__ == '__main__': # Extract public key from private key into STDOUT # Process = subprocess.Popen('%s rsa -in "%s" -modulus -noout' % (OpenSslCommand, args.PrivateKeyFileName), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) - PublicKeyHexString = Process.communicate()[0].split('=')[1].strip() + PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip() + PublicKeyHexString = PublicKeyHexString.decode(encoding='utf-8') PublicKey = '' while len(PublicKeyHexString) > 0: - PublicKey = PublicKey + chr(int(PublicKeyHexString[0:2], 16)) + PublicKey = PublicKey + PublicKeyHexString[0:2] PublicKeyHexString=PublicKeyHexString[2:] if Process.returncode != 0: sys.exit(Process.returncode) @@ -186,7 +187,7 @@ if __name__ == '__main__': # args.OutputFile = open(args.OutputFileName, 'wb') args.OutputFile.write(EFI_HASH_ALGORITHM_SHA256_GUID.get_bytes_le()) - args.OutputFile.write(PublicKey) + args.OutputFile.write(bytearray.fromhex(PublicKey)) args.OutputFile.write(Signature) args.OutputFile.write(args.InputFileBuffer) args.OutputFile.close() @@ -208,7 +209,7 @@ if __name__ == '__main__': # # Verify the public key # - if Header.PublicKey != PublicKey: + if Header.PublicKey != bytearray.fromhex(PublicKey): print('ERROR: Public key in input file does not match public key from private key file') sys.exit(1) diff --git a/BaseTools/Source/Python/Trim/Trim.py b/BaseTools/Source/Python/Trim/Trim.py index b9ca83d6d8..4b7c2020b4 100644 --- a/BaseTools/Source/Python/Trim/Trim.py +++ b/BaseTools/Source/Python/Trim/Trim.py @@ -458,7 +458,7 @@ def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile): EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" %OutputFile, None) # Use a instance of BytesIO to cache data - fStringIO = BytesIO('') + fStringIO = BytesIO() for Item in VfrUniOffsetList: if (Item[0].find("Strings") != -1): @@ -468,8 +468,7 @@ def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile): # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } } # UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66] - UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid] - fStringIO.write(''.join(UniGuid)) + fStringIO.write(bytes(UniGuid)) UniValue = pack ('Q', int (Item[1], 16)) fStringIO.write (UniValue) else: @@ -479,9 +478,7 @@ def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile): # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } }; # VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2] - VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid] - fStringIO.write(''.join(VfrGuid)) - type (Item[1]) + fStringIO.write(bytes(VfrGuid)) VfrValue = pack ('Q', int (Item[1], 16)) fStringIO.write (VfrValue) diff --git a/BaseTools/Source/Python/UPT/Library/StringUtils.py b/BaseTools/Source/Python/UPT/Library/StringUtils.py index 2be382fa17..c2148a44fb 100644 --- a/BaseTools/Source/Python/UPT/Library/StringUtils.py +++ b/BaseTools/Source/Python/UPT/Library/StringUtils.py @@ -680,9 +680,7 @@ def GetHelpTextList(HelpTextClassList): # @param String: the source string # def StringArrayLength(String): - if isinstance(String, unicode): - return (len(String) + 1) * 2 + 1 - elif String.startswith('L"'): + if String.startswith('L"'): return 
(len(String) - 3 + 1) * 2 elif String.startswith('"'): return (len(String) - 2 + 1) diff --git a/BaseTools/Source/Python/Workspace/DscBuildData.py b/BaseTools/Source/Python/Workspace/DscBuildData.py index 26d2c166fa..9f99aeaa21 100644 --- a/BaseTools/Source/Python/Workspace/DscBuildData.py +++ b/BaseTools/Source/Python/Workspace/DscBuildData.py @@ -130,7 +130,14 @@ def GetDependencyList(FileStack, SearchPathList): continue if FileContent[0] == 0xff or FileContent[0] == 0xfe: - FileContent = unicode(FileContent, "utf-16") + FileContent = str(FileContent, "utf-16") + IncludedFileList = gIncludePattern.findall(FileContent) + else: + try: + FileContent = str(FileContent, "utf-8") + IncludedFileList = gIncludePattern.findall(FileContent) + except: + pass IncludedFileList = gIncludePattern.findall(FileContent) for Inc in IncludedFileList: @@ -1655,7 +1662,7 @@ class DscBuildData(PlatformBuildClassObject): except: EdkLogger.error('Build', COMMAND_FAILURE, 'Can not execute command: %s' % Command) Result = Process.communicate() - return Process.returncode, Result[0], Result[1] + return Process.returncode, Result[0].decode(encoding='utf-8', errors='ignore'), Result[1].decode(encoding='utf-8', errors='ignore') @staticmethod def IntToCString(Value, ValueSize): @@ -2684,7 +2691,7 @@ class DscBuildData(PlatformBuildClassObject): Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName] = {} Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName][DefaultStore] = DefaultValue for pcd in Pcds.values(): - SkuInfoObj = pcd.SkuInfoList.values()[0] + SkuInfoObj = list(pcd.SkuInfoList.values())[0] pcdDecObject = self._DecPcds[pcd.TokenCName, pcd.TokenSpaceGuidCName] pcd.DatumType = pcdDecObject.DatumType # Only fix the value while no value provided in DSC file. diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py index 9f2a3bcb5a..470cfed5b6 100644 --- a/BaseTools/Source/Python/Workspace/MetaFileParser.py +++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py @@ -1931,10 +1931,10 @@ class DecParser(MetaFileParser): return if self._include_flag: - self._ValueList[1] = "_" + md5(self._CurrentLine).hexdigest() + self._ValueList[1] = "_" + md5(self._CurrentLine.encode('utf-8')).hexdigest() self._ValueList[2] = self._CurrentLine if self._package_flag and "}" != self._CurrentLine: - self._ValueList[1] = "_" + md5(self._CurrentLine).hexdigest() + self._ValueList[1] = "_" + md5(self._CurrentLine.encode('utf-8')).hexdigest() self._ValueList[2] = self._CurrentLine if self._CurrentLine == "}": self._package_flag = False diff --git a/BaseTools/Source/Python/build/BuildReport.py b/BaseTools/Source/Python/build/BuildReport.py index 33cc9db735..765aa5dbc6 100644 --- a/BaseTools/Source/Python/build/BuildReport.py +++ b/BaseTools/Source/Python/build/BuildReport.py @@ -28,7 +28,7 @@ import hashlib import subprocess import threading from datetime import datetime -from io import BytesIO +from io import StringIO from Common import EdkLogger from Common.Misc import SaveFileOnChange from Common.Misc import GuidStructureByteArrayToGuidString @@ -641,7 +641,7 @@ class ModuleReport(object): Match = gTimeStampPattern.search(FileContents) if Match: - self.BuildTimeStamp = datetime.fromtimestamp(int(Match.group(1))) + self.BuildTimeStamp = datetime.utcfromtimestamp(int(Match.group(1))) except IOError: EdkLogger.warn(None, "Fail to read report file", FwReportFileName) @@ -726,8 +726,8 @@ def ReadMessage(From, To, ExitFlag): # read one line a time Line = From.readline() # empty 
string means "end" - if Line is not None and Line != "": - To(Line.rstrip()) + if Line is not None and Line != b"": + To(Line.rstrip().decode(encoding='utf-8', errors='ignore')) else: break if ExitFlag.isSet(): @@ -2246,7 +2246,7 @@ class BuildReport(object): def GenerateReport(self, BuildDuration, AutoGenTime, MakeTime, GenFdsTime): if self.ReportFile: try: - File = BytesIO('') + File = StringIO('') for (Wa, MaList) in self.ReportList: PlatformReport(Wa, MaList, self.ReportType).GenerateReport(File, BuildDuration, AutoGenTime, MakeTime, GenFdsTime, self.ReportType) Content = FileLinesSplit(File.getvalue(), gLineMaxLength) diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py index 2629d6046d..8208636556 100644 --- a/BaseTools/Source/Python/build/build.py +++ b/BaseTools/Source/Python/build/build.py @@ -18,7 +18,7 @@ # import Common.LongFilePathOs as os import re -from io import BytesIO +from io import StringIO import sys import glob import time @@ -242,8 +242,8 @@ def ReadMessage(From, To, ExitFlag): # read one line a time Line = From.readline() # empty string means "end" - if Line is not None and Line != "": - To(Line.rstrip()) + if Line is not None and Line != b"": + To(Line.rstrip().decode(encoding='utf-8', errors='ignore')) else: break if ExitFlag.isSet(): @@ -1780,7 +1780,7 @@ class Build(): if not Ma.IsLibrary: ModuleList[Ma.Guid.upper()] = Ma - MapBuffer = BytesIO('') + MapBuffer = StringIO('') if self.LoadFixAddress != 0: # # Rebase module to the preferred memory address before GenFds @@ -1938,7 +1938,7 @@ class Build(): if not Ma.IsLibrary: ModuleList[Ma.Guid.upper()] = Ma - MapBuffer = BytesIO('') + MapBuffer = StringIO('') if self.LoadFixAddress != 0: # # Rebase module to the preferred memory address before GenFds @@ -2125,7 +2125,7 @@ class Build(): # # Rebase module to the preferred memory address before GenFds # - MapBuffer = BytesIO('') + MapBuffer = StringIO('') if self.LoadFixAddress != 0: self._CollectModuleMapBuffer(MapBuffer, ModuleList) -- 2.39.2
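
The hunks above repeatedly apply the same Python 3 bytes/str pattern: files fed to hashlib are opened in 'rb' mode and str values are encoded before hashing, binary buffers are built from bytearray()/BytesIO() instead of '', signature checks compare against bytes literals (b'BM', b'JFIF', b'PE\0\0') and index bytes directly without ord(), and subprocess/pipe output is decoded before it is logged or parsed. A minimal standalone sketch of that pattern follows; it is an illustration only, not part of the patch, it assumes nothing beyond the standard library, and the file name and hash string are placeholders rather than values used by BaseTools.

    import hashlib
    import subprocess
    import sys
    from io import BytesIO
    from struct import pack

    # Hashing: open the file in binary mode so md5.update() receives bytes,
    # and encode any str (e.g. a previously computed hash) before updating.
    m = hashlib.md5()
    with open('Example.dec', 'rb') as f:              # placeholder file name
        m.update(f.read())
    m.update('PlatformHashPlaceholder'.encode('utf-8'))

    # Binary buffers: start from bytearray()/BytesIO() rather than ''.
    Buffer = bytearray()
    Buffer += pack('=L', 16)
    Stream = BytesIO()
    Stream.write(bytes([0xE0, 0xC5, 0x13, 0x89]))     # was ''.join(chr(x) for x in ...) on Python 2

    # Signature checks: compare against bytes literals, and index the buffer
    # directly (buf[i] is already an int on Python 3, so no ord()).
    Header = b'BM\x36\x00\x00\x00'
    if Header[0:2] != b'BM':
        raise ValueError('not a BMP header')
    Alignment = 1 << (Header[2] & 0x1F)

    # Pipe output is bytes: decode it before printing or splitting.
    Out = subprocess.run([sys.executable, '-c', 'print("hi")'],
                         stdout=subprocess.PIPE).stdout
    print(Out.decode(encoding='utf-8', errors='ignore').strip())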