for files in AllWorkSpaceMetaFiles:\r
if files.endswith('.dec'):\r
continue\r
- f = open(files, 'r')\r
+ f = open(files, 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')\r
m = hashlib.md5()\r
# Get .dec file's hash value\r
- f = open(Pkg.MetaFile.Path, 'r')\r
+ f = open(Pkg.MetaFile.Path, 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
for Root, Dirs, Files in os.walk(str(inc)):\r
for File in sorted(Files):\r
File_Path = os.path.join(Root, File)\r
- f = open(File_Path, 'r')\r
+ f = open(File_Path, 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
for pcd in self._DynamicPcdList:\r
if len(pcd.SkuInfoList) == 1:\r
for (SkuName, SkuId) in allskuset:\r
- if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0:\r
+ if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:\r
continue\r
pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])\r
pcd.SkuInfoList[SkuName].SkuId = SkuId\r
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)\r
RetVal[AutoFile] = str(StringH)\r
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
- if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":\r
+ if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":\r
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)\r
RetVal[AutoFile] = UniStringBinBuffer.getvalue()\r
AutoFile.IsBinary = True\r
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)\r
RetVal[AutoFile] = str(StringIdf)\r
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
- if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":\r
+ if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":\r
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)\r
RetVal[AutoFile] = IdfGenBinBuffer.getvalue()\r
AutoFile.IsBinary = True\r
EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)\r
\r
# Use a instance of BytesIO to cache data\r
- fStringIO = BytesIO('')\r
+ fStringIO = BytesIO()\r
\r
for Item in VfrUniOffsetList:\r
if (Item[0].find("Strings") != -1):\r
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r
#\r
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]\r
- UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]\r
- fStringIO.write(''.join(UniGuid))\r
+ fStringIO.write(bytes(UniGuid))\r
UniValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (UniValue)\r
else:\r
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r
#\r
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]\r
- VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]\r
- fStringIO.write(''.join(VfrGuid))\r
+ fStringIO.write(bytes(VfrGuid))\r
VfrValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (VfrValue)\r
#\r
GlobalData.gModuleHash[self.Arch] = {}\r
m = hashlib.md5()\r
# Add Platform level hash\r
- m.update(GlobalData.gPlatformHash)\r
+ m.update(GlobalData.gPlatformHash.encode('utf-8'))\r
# Add Package level hash\r
if self.DependentPackageList:\r
for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:\r
- m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName])\r
+ m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName].encode('utf-8'))\r
\r
# Add Library hash\r
if self.LibraryAutoGenList:\r
for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r
Lib.GenModuleHash()\r
- m.update(GlobalData.gModuleHash[self.Arch][Lib.Name])\r
+ m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
\r
# Add Module self\r
- f = open(str(self.MetaFile), 'r')\r
+ f = open(str(self.MetaFile), 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
# Add Module's source files\r
if self.SourceFileList:\r
for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r
- f = open(str(File), 'r')\r
+ f = open(str(File), 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
TempBuffer += Buffer\r
elif File.Ext.upper() == '.JPG':\r
ImageType, = struct.unpack('4s', Buffer[6:10])\r
- if ImageType != 'JFIF':\r
+ if ImageType != b'JFIF':\r
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)\r
TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)\r
TempBuffer += pack('I', len(Buffer))\r
\r
def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):\r
ImageType, = struct.unpack('2s', Buffer[0:2])\r
- if ImageType!= 'BM': # BMP file type is 'BM'\r
+ if ImageType!= b'BM': # BMP file type is 'BM'\r
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)\r
BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])\r
BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')\r
for Index in range(0, len(PaletteBuffer)):\r
if Index % 4 == 3:\r
continue\r
- PaletteTemp += PaletteBuffer[Index]\r
+ PaletteTemp += bytes([PaletteBuffer[Index]])\r
PaletteBuffer = PaletteTemp[1:]\r
return ImageBuffer, PaletteBuffer\r
\r
\r
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]\r
\r
- Buffer = ''\r
+ Buffer = bytearray()\r
for Datas in self.RawDataList:\r
if type(Datas) in (list, tuple):\r
for Data in Datas:\r
DbItemList.__init__(self, ItemSize, DataList, RawDataList)\r
\r
def PackData(self):\r
- Buffer = ''\r
+ Buffer = bytearray()\r
PackStr = "=LHH"\r
for Datas in self.RawDataList:\r
Buffer += pack(PackStr,\r
def PackData(self):\r
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]\r
\r
- Buffer = ''\r
+ Buffer = bytearray()\r
for DataList in self.RawDataList:\r
for Data in DataList:\r
if type(Data) in (list, tuple):\r
\r
def PackData(self):\r
PackStr = "=LLHHLHH"\r
- Buffer = ''\r
+ Buffer = bytearray()\r
for DataList in self.RawDataList:\r
for Data in DataList:\r
Buffer += pack(PackStr,\r
\r
def PackData(self):\r
PackStr = "=LL"\r
- Buffer = ''\r
+ Buffer = bytearray()\r
for Data in self.RawDataList:\r
Buffer += pack(PackStr,\r
GetIntegerValue(Data[0]),\r
return length * self.ItemSize\r
def PackData(self):\r
PackStr = "=H"\r
- Buffer = ''\r
+ Buffer = bytearray()\r
for Data in self.RawDataList:\r
Buffer += pack(PackStr,\r
GetIntegerValue(Data[0]))\r
Index = 0\r
for Item in DbItemTotal:\r
Index +=1\r
- b = Item.PackData()\r
+ b = bytes(Item.PackData())\r
Buffer += b\r
if Index == InitTableNum:\r
if len(Buffer) % 8:\r
totallenbuff = pack("=L", totallen)\r
newbuffer = databasebuff[:32]\r
for i in range(4):\r
- newbuffer += totallenbuff[i]\r
+ newbuffer += bytes([totallenbuff[i]])\r
for i in range(36, totallen):\r
- newbuffer += databasebuff[i]\r
+ newbuffer += bytes([databasebuff[i]])\r
\r
return newbuffer\r
\r
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)\r
final_data = ()\r
for item in PcdDbBuffer:\r
- final_data += unpack("B", item)\r
+ final_data += unpack("B", bytes([item]))\r
PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)\r
PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)\r
VarCheckTableData[(skuname, skuid)] = VarCheckTab\r
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)\r
final_data = ()\r
for item in PcdDbBuffer:\r
- final_data += unpack("B", item)\r
+ final_data += unpack("B", bytes([item]))\r
PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)\r
\r
return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)\r
data = value_list[0]\r
value_list = []\r
for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):\r
- value_list.append(hex(unpack("B", data_byte)[0]))\r
+ value_list.append(hex(unpack("B", bytes([data_byte]))[0]))\r
newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = value_list\r
try:\r
newvaluestr = "{" + ",".join(VariableMgr.assemble_variable(newvalue)) +"}"\r
data = value_list[0]\r
value_list = []\r
for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):\r
- value_list.append(hex(unpack("B", data_byte)[0]))\r
+ value_list.append(hex(unpack("B", bytes([data_byte]))[0]))\r
newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = (value_list,item.pcdname,item.PcdDscLine)\r
for offset in newvalue:\r
value_list,itemPcdname,itemPcdDscLine = newvalue[offset]\r
\r
default_data_array = ()\r
for item in default_data_buffer:\r
- default_data_array += unpack("B", item)\r
+ default_data_array += unpack("B", bytes([item]))\r
\r
var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])\r
\r
\r
others_data_array = ()\r
for item in others_data_buffer:\r
- others_data_array += unpack("B", item)\r
+ others_data_array += unpack("B", bytes([item]))\r
\r
data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)\r
\r
return []\r
\r
pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})\r
- NvStoreDataBuffer = ""\r
+ NvStoreDataBuffer = bytearray()\r
var_data_offset = collections.OrderedDict()\r
offset = NvStorageHeaderSize\r
for default_data, default_info in pcds_default_data.values():\r
\r
nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8)\r
\r
- data_delta_structure_buffer = ""\r
+ data_delta_structure_buffer = bytearray()\r
for skuname, defaultstore in var_data:\r
if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):\r
continue\r
def unpack_data(data):\r
final_data = ()\r
for item in data:\r
- final_data += unpack("B", item)\r
+ final_data += unpack("B", bytes([item]))\r
return final_data\r
\r
@staticmethod\r
\r
@staticmethod\r
def PACK_VARIABLES_DATA(var_value,data_type, tail = None):\r
- Buffer = ""\r
+ Buffer = bytearray()\r
data_len = 0\r
if data_type == DataType.TAB_VOID:\r
for value_char in var_value.strip("{").strip("}").split(","):\r
\r
@staticmethod\r
def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):\r
- Buffer = ""\r
+ Buffer = bytearray()\r
Buffer += pack("=L", 4+8+8)\r
Buffer += pack("=Q", int(skuid))\r
Buffer += pack("=Q", int(defaultstoragename))\r
def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):\r
skuid = self.GetSkuId(skuname)\r
defaultstorageid = self.GetDefaultStoreId(defaultstoragename)\r
- Buffer = ""\r
+ Buffer = bytearray()\r
Buffer += pack("=L", 4+8+8)\r
Buffer += pack("=Q", int(skuid))\r
Buffer += pack("=Q", int(defaultstorageid))\r
\r
@staticmethod\r
def PACK_VARIABLE_NAME(var_name):\r
- Buffer = ""\r
+ Buffer = bytearray()\r
for name_char in var_name.strip("{").strip("}").split(","):\r
Buffer += pack("=B", int(name_char, 16))\r
\r
# @retval: A list for formatted hex string\r
#\r
def AscToHexList(Ascii):\r
+ if isinstance(Ascii, bytes):\r
+ return ['0x{0:02X}'.format(Item) for Item in Ascii]\r
return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]\r
\r
## Create content of .h file\r
\r
gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE)\r
\r
-## Convert a python unicode string to a normal string\r
-#\r
-# Convert a python unicode string to a normal string\r
-# UniToStr(u'I am a string') is 'I am a string'\r
-#\r
-# @param Uni: The python unicode string\r
-#\r
-# @retval: The formatted normal string\r
-#\r
-def UniToStr(Uni):\r
- return repr(Uni)[2:-1]\r
-\r
## Convert a unicode string to a Hex list\r
#\r
# Convert a unicode string to a Hex list\r
if EndPos != -1 and EndPos - StartPos == 6 :\r
if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE):\r
EndStr = Line[EndPos: ]\r
- UniStr = ('\u' + (Line[StartPos + 2 : EndPos])).decode('unicode_escape')\r
+ UniStr = Line[StartPos + 2: EndPos]\r
if EndStr.startswith(u'\\x') and len(EndStr) >= 7:\r
if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE):\r
Line = Line[0 : StartPos] + UniStr + EndStr\r
os.mkdir(dest)\r
BinFileName = "PcdVarCheck.bin"\r
BinFilePath = os.path.join(dest, BinFileName)\r
- Buffer = ''\r
+ Buffer = bytearray()\r
index = 0\r
for var_check_tab in self.var_check_info:\r
index += 1\r
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,\r
"PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))\r
try:\r
- self.PcdValue = pack('%ds' % Size, ValueString)\r
+ self.PcdValue = pack('%ds' % Size, bytes(ValueString, 'utf-8'))\r
except:\r
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))\r
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)\r
\r
# Use a instance of BytesIO to cache data\r
- fStringIO = BytesIO('')\r
+ fStringIO = BytesIO()\r
\r
# Write the header of map file.\r
try :\r
# Write Vpd binary file\r
fStringIO.seek (eachPcd.PcdBinOffset)\r
if isinstance(eachPcd.PcdValue, list):\r
- ValueList = [chr(Item) for Item in eachPcd.PcdValue]\r
- fStringIO.write(''.join(ValueList))\r
+ fStringIO.write(bytes(eachPcd.PcdValue))\r
else:\r
fStringIO.write (eachPcd.PcdValue)\r
\r
import os\r
from . import LongFilePathOsPath\r
from Common.LongFilePathSupport import LongFilePath\r
-from Common.LongFilePathSupport import UniToStr\r
import time\r
\r
path = LongFilePathOsPath\r
List = []\r
uList = os.listdir(u"%s" % LongFilePath(path))\r
for Item in uList:\r
- List.append(UniToStr(Item))\r
+ List.append(Item)\r
return List\r
\r
environ = os.environ\r
## @file\r
# Override built in function file.open to provide support for long file path\r
#\r
-# Copyright (c) 2014 - 2015, Intel Corporation. All rights reserved.<BR>\r
+# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>\r
# This program and the accompanying materials\r
# are licensed and made available under the terms and conditions of the BSD License\r
# which accompanies this distribution. The full text of the license may be found at\r
with open(LongFilePath(src), 'rb') as fsrc:\r
with open(LongFilePath(dst), 'wb') as fdst:\r
shutil.copyfileobj(fsrc, fdst)\r
-\r
-## Convert a python unicode string to a normal string\r
-#\r
-# Convert a python unicode string to a normal string\r
-# UniToStr(u'I am a string') is 'I am a string'\r
-#\r
-# @param Uni: The python unicode string\r
-#\r
-# @retval: The formatted normal string\r
-#\r
-def UniToStr(Uni):\r
- return repr(Uni)[2:-1]\r
# @retval False If the file content is the same\r
#\r
def SaveFileOnChange(File, Content, IsBinaryFile=True):\r
- if not IsBinaryFile:\r
- Content = Content.replace("\n", os.linesep)\r
-\r
if os.path.exists(File):\r
try:\r
if isinstance(Content, bytes):\r
if err:\r
raise BadExpression("DevicePath: %s" % str(err))\r
Size = len(out.split())\r
- out = ','.join(out.split())\r
+ out = ','.join(out.decode(encoding='utf-8', errors='ignore').split())\r
return '{' + out + '}', Size\r
\r
def ParseFieldValue (Value):\r
if Value[0] == '"' and Value[-1] == '"':\r
Value = Value[1:-1]\r
try:\r
- Value = "'" + uuid.UUID(Value).get_bytes_le() + "'"\r
+ Value = "{" + ','.join([str(i) for i in uuid.UUID(Value).bytes_le]) + "}"\r
except ValueError as Message:\r
raise BadExpression(Message)\r
Value, Size = ParseFieldValue(Value)\r
ByteArray = array.array('B')\r
ByteArray.fromfile(PeObject, 4)\r
# PE signature should be 'PE\0\0'\r
- if ByteArray.tostring() != 'PE\0\0':\r
+ if ByteArray.tostring() != b'PE\0\0':\r
self.ErrorInfo = self.FileName + ' has no valid PE signature PE00'\r
return\r
\r
return List\r
\r
def StringToArray(String):\r
- if isinstance(String, unicode):\r
- if len(unicode) == 0:\r
- return "{0x00,0x00}"\r
- return "{%s,0x00,0x00}" % ",".join("0x%02x,0x00" % ord(C) for C in String)\r
- elif String.startswith('L"'):\r
+ if String.startswith('L"'):\r
if String == "L\"\"":\r
return "{0x00,0x00}"\r
else:\r
return '{%s,0,0}' % ','.join(String.split())\r
\r
def StringArrayLength(String):\r
- if isinstance(String, unicode):\r
- return (len(String) + 1) * 2 + 1;\r
- elif String.startswith('L"'):\r
+ if String.startswith('L"'):\r
return (len(String) - 3 + 1) * 2\r
elif String.startswith('"'):\r
return (len(String) - 2 + 1)\r
if (Vpd is None):\r
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")\r
\r
- if not (Offset >= 0 or Offset == "*"):\r
+ if not (Offset >= "0" or Offset == "*"):\r
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)\r
\r
if Vpd.DatumType == TAB_VOID:\r
- if Vpd.MaxDatumSize <= 0:\r
+ if Vpd.MaxDatumSize <= "0":\r
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,\r
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))\r
elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:\r
if not Vpd.MaxDatumSize:\r
Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType]\r
else:\r
- if Vpd.MaxDatumSize <= 0:\r
+ if Vpd.MaxDatumSize <= "0":\r
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,\r
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))\r
\r
"Invalid parameter FilePath: %s." % FilePath)\r
\r
Content = FILE_COMMENT_TEMPLATE\r
- Pcds = sorted(self._VpdArray.keys())\r
+ Pcds = sorted(self._VpdArray.keys(), key=lambda x: x.TokenCName)\r
for Pcd in Pcds:\r
i = 0\r
PcdTokenCName = Pcd.TokenCName\r
except Exception as X:\r
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData=str(X))\r
(out, error) = PopenObject.communicate()\r
- print(out)\r
+ print(out.decode(encoding='utf-8', errors='ignore'))\r
while PopenObject.returncode is None :\r
PopenObject.wait()\r
\r
def GenFfs (self, FvName, Dict = {}, IsMakefile = False):\r
DXE_GUID = "FC510EE7-FFDC-11D4-BD41-0080C73C8881"\r
PEI_GUID = "1B45CC0A-156A-428A-AF62-49864DA0E6E6"\r
- Buffer = BytesIO('')\r
+ Buffer = BytesIO()\r
AprioriFileGuid = DXE_GUID\r
if self.AprioriType == "PEI":\r
AprioriFileGuid = PEI_GUID\r
import Common.LongFilePathOs as os\r
import subprocess\r
from io import BytesIO\r
+from io import StringIO\r
from Common.Misc import SaveFileOnChange\r
from Common.Misc import PackRegistryFormatGuid\r
import uuid\r
#\r
# The real capsule header structure is 28 bytes\r
#\r
- Header.write('\x00'*(HdrSize-28))\r
+ Header.write(b'\x00'*(HdrSize-28))\r
Header.write(FwMgrHdr.getvalue())\r
Header.write(Content.getvalue())\r
#\r
def GenCapInf(self):\r
self.CapInfFileName = os.path.join(GenFdsGlobalVariable.FvDir,\r
self.UiCapsuleName + "_Cap" + '.inf')\r
- CapInfFile = BytesIO() #open (self.CapInfFileName , 'w+')\r
+ CapInfFile = StringIO() #open (self.CapInfFileName , 'w+')\r
\r
CapInfFile.writelines("[options]" + T_CHAR_LF)\r
\r
if self.FvName.find('.fv') == -1:\r
if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:\r
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[self.FvName.upper()]\r
- FdBuffer = BytesIO('')\r
+ FdBuffer = BytesIO()\r
FvObj.CapsuleName = self.CapsuleName\r
FvFile = FvObj.AddToBuffer(FdBuffer)\r
FvObj.CapsuleName = None\r
HasCapsuleRegion = True\r
break\r
if HasCapsuleRegion:\r
- TempFdBuffer = BytesIO('')\r
+ TempFdBuffer = BytesIO()\r
PreviousRegionStart = -1\r
PreviousRegionSize = 1\r
\r
GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')\r
RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.vtfRawDict, self.DefineVarDict)\r
\r
- FdBuffer = BytesIO('')\r
+ FdBuffer = BytesIO()\r
PreviousRegionStart = -1\r
PreviousRegionSize = 1\r
for RegionObj in self.RegionList :\r
Dict.update(self.DefineVarDict)\r
SectionAlignments = None\r
if self.FvName is not None :\r
- Buffer = BytesIO('')\r
+ Buffer = BytesIO()\r
if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict:\r
EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName))\r
Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper())\r
elif self.FileName is not None:\r
if hasattr(self, 'FvFileType') and self.FvFileType == 'RAW':\r
if isinstance(self.FileName, list) and isinstance(self.SubAlignment, list) and len(self.FileName) == len(self.SubAlignment):\r
- FileContent = ''\r
+ FileContent = BytesIO()\r
MaxAlignIndex = 0\r
MaxAlignValue = 1\r
for Index, File in enumerate(self.FileName):\r
if AlignValue > MaxAlignValue:\r
MaxAlignIndex = Index\r
MaxAlignValue = AlignValue\r
- FileContent += Content\r
- if len(FileContent) % AlignValue != 0:\r
+ FileContent.write(Content)\r
+ if len(FileContent.getvalue()) % AlignValue != 0:\r
-                        Size = AlignValue - len(FileContent) % AlignValue
+                        Size = AlignValue - len(FileContent.getvalue()) % AlignValue
for i in range(0, Size):\r
- FileContent += pack('B', 0xFF)\r
+ FileContent.write(pack('B', 0xFF))\r
\r
- if FileContent:\r
+ if FileContent.getvalue() != b'':\r
OutputRAWFile = os.path.join(GenFdsGlobalVariable.FfsDir, self.NameGuid, self.NameGuid + '.raw')\r
- SaveFileOnChange(OutputRAWFile, FileContent, True)\r
+ SaveFileOnChange(OutputRAWFile, FileContent.getvalue(), True)\r
self.FileName = OutputRAWFile\r
self.SubAlignment = self.SubAlignment[MaxAlignIndex]\r
\r
def __GenUniVfrOffsetFile(VfrUniOffsetList, UniVfrOffsetFileName):\r
\r
# Use a instance of StringIO to cache data\r
- fStringIO = BytesIO('')\r
+ fStringIO = BytesIO()\r
\r
for Item in VfrUniOffsetList:\r
if (Item[0].find("Strings") != -1):\r
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r
#\r
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]\r
- UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]\r
- fStringIO.write(''.join(UniGuid))\r
+ fStringIO.write(bytes(UniGuid))\r
UniValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (UniValue)\r
else:\r
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r
#\r
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]\r
- VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]\r
- fStringIO.write(''.join(VfrGuid))\r
+ fStringIO.write(bytes(VfrGuid))\r
type (Item[1])\r
VfrValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (VfrValue)\r
import Common.LongFilePathOs as os\r
import subprocess\r
from io import BytesIO\r
+from io import StringIO\r
from struct import *\r
\r
from . import Ffs\r
# PI FvHeader is 0x48 byte\r
FvHeaderBuffer = FvFileObj.read(0x48)\r
# FV alignment position.\r
- FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E]) & 0x1F)\r
+ FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F)\r
if FvAlignmentValue >= 0x400:\r
if FvAlignmentValue >= 0x100000:\r
if FvAlignmentValue >= 0x1000000:\r
#\r
self.InfFileName = os.path.join(GenFdsGlobalVariable.FvDir,\r
self.UiFvName + '.inf')\r
- self.FvInfFile = BytesIO()\r
+ self.FvInfFile = StringIO()\r
\r
#\r
# Add [Options]\r
GenFdsGlobalVariable.ErrorLogger("FV Extension Header Entries declared for %s with no FvNameGuid declaration." % (self.UiFvName))\r
else:\r
TotalSize = 16 + 4\r
- Buffer = ''\r
+ Buffer = bytearray()\r
if self.UsedSizeEnable:\r
TotalSize += (4 + 4)\r
## define EFI_FV_EXT_TYPE_USED_SIZE_TYPE 0x03\r
#\r
Buffer += (pack('HH', (FvUiLen + 16 + 4), 0x0002)\r
+ PackGUID(Guid)\r
- + self.UiFvName)\r
+ + bytes(self.UiFvName, 'utf-8'))\r
\r
for Index in range (0, len(self.FvExtEntryType)):\r
if self.FvExtEntryType[Index] == 'FILE':\r
# Generate Fv\r
#\r
if self.FvName is not None:\r
- Buffer = BytesIO('')\r
+ Buffer = BytesIO()\r
Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName)\r
if Fv is not None:\r
self.Fv = Fv\r
# PI FvHeader is 0x48 byte\r
FvHeaderBuffer = FvFileObj.read(0x48)\r
# FV alignment position.\r
- FvAlignmentValue = 1 << (ord (FvHeaderBuffer[0x2E]) & 0x1F)\r
+ FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F)\r
# FvAlignmentValue is larger than or equal to 1K\r
if FvAlignmentValue >= 0x400:\r
if FvAlignmentValue >= 0x100000:\r
from . import RuleComplexFile\r
from .EfiSection import EfiSection\r
from io import BytesIO\r
+from io import StringIO\r
import Common.TargetTxtClassObject as TargetTxtClassObject\r
import Common.ToolDefClassObject as ToolDefClassObject\r
from Common.DataType import *\r
return\r
elif GenFds.OnlyGenerateThisFv is None:\r
for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict.values():\r
- Buffer = BytesIO('')\r
+ Buffer = BytesIO()\r
FvObj.AddToBuffer(Buffer)\r
Buffer.close()\r
\r
\r
def GenerateGuidXRefFile(BuildDb, ArchList, FdfParserObj):\r
GuidXRefFileName = os.path.join(GenFdsGlobalVariable.FvDir, "Guid.xref")\r
- GuidXRefFile = BytesIO('')\r
+ GuidXRefFile = StringIO('')\r
PkgGuidDict = {}\r
GuidDict = {}\r
ModuleList = []\r
return\r
if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1:\r
GenFdsGlobalVariable.InfLogger ("Return Value = %d" % PopenObject.returncode)\r
- GenFdsGlobalVariable.InfLogger (out)\r
- GenFdsGlobalVariable.InfLogger (error)\r
+ GenFdsGlobalVariable.InfLogger (out.decode(encoding='utf-8',errors='ignore'))\r
+ GenFdsGlobalVariable.InfLogger (error.decode(encoding='utf-8', errors='ignore'))\r
if PopenObject.returncode != 0:\r
print("###", cmd)\r
EdkLogger.error("GenFds", COMMAND_FAILURE, errorMess)\r
PadByte = pack('B', 0xFF)\r
else:\r
PadByte = pack('B', 0)\r
- PadData = ''.join(PadByte for i in range(0, Size))\r
- Buffer.write(PadData)\r
+ for i in range(0, Size):\r
+ Buffer.write(PadByte)\r
\r
## AddToBuffer()\r
#\r
if self.FvAddress % FvAlignValue != 0:\r
EdkLogger.error("GenFds", GENFDS_ERROR,\r
"FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))\r
- FvBuffer = BytesIO('')\r
+ FvBuffer = BytesIO()\r
FvBaseAddress = '0x%X' % self.FvAddress\r
BlockSize = None\r
BlockNum = None\r
if Process.returncode != 0:\r
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')\r
sys.exit(Process.returncode)\r
- print(Version[0])\r
+ print(Version[0].decode())\r
\r
#\r
# Read input file into a buffer and save input filename\r
if Process.returncode != 0:\r
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')\r
sys.exit(Process.returncode)\r
- print(Version[0])\r
+ print(Version[0].decode())\r
\r
args.PemFileName = []\r
\r
args.PemFileName.append(Item.name)\r
Item.close()\r
\r
- PublicKeyHash = ''\r
+ PublicKeyHash = bytearray()\r
for Item in args.PemFileName:\r
#\r
# Extract public key from private key into STDOUT\r
#\r
Process = subprocess.Popen('%s rsa -in %s -modulus -noout' % (OpenSslCommand, Item), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)\r
- PublicKeyHexString = Process.communicate()[0].split('=')[1].strip()\r
+ PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()\r
if Process.returncode != 0:\r
print('ERROR: Unable to extract public key from private key')\r
sys.exit(Process.returncode)\r
- PublicKey = ''\r
+ PublicKey = bytearray()\r
for Index in range (0, len(PublicKeyHexString), 2):\r
- PublicKey = PublicKey + chr(int(PublicKeyHexString[Index:Index + 2], 16))\r
+    PublicKey = PublicKey + bytes([int(PublicKeyHexString[Index:Index + 2], 16)])
\r
#\r
# Generate SHA 256 hash of RSA 2048 bit public key into STDOUT\r
#\r
PublicKeyHashC = '{'\r
for Item in PublicKeyHash:\r
- PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (ord(Item))\r
+ PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (Item)\r
PublicKeyHashC = PublicKeyHashC[:-2] + '}'\r
\r
#\r
# Write SHA 256 of 2048 bit binary public key to public key hash C structure file\r
#\r
try:\r
- args.PublicKeyHashCFile.write (PublicKeyHashC)\r
+  args.PublicKeyHashCFile.write (bytes(PublicKeyHashC, encoding='utf-8'))
args.PublicKeyHashCFile.close ()\r
except:\r
pass\r
if Process.returncode != 0:\r
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')\r
sys.exit(Process.returncode)\r
- print(Version[0])\r
+ print(Version[0].decode())\r
\r
#\r
# Read input file into a buffer and save input filename\r
# Extract public key from private key into STDOUT\r
#\r
Process = subprocess.Popen('%s rsa -in "%s" -modulus -noout' % (OpenSslCommand, args.PrivateKeyFileName), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)\r
- PublicKeyHexString = Process.communicate()[0].split('=')[1].strip()\r
+ PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()\r
+ PublicKeyHexString = PublicKeyHexString.decode(encoding='utf-8')\r
PublicKey = ''\r
while len(PublicKeyHexString) > 0:\r
- PublicKey = PublicKey + chr(int(PublicKeyHexString[0:2], 16))\r
+ PublicKey = PublicKey + PublicKeyHexString[0:2]\r
PublicKeyHexString=PublicKeyHexString[2:]\r
if Process.returncode != 0:\r
sys.exit(Process.returncode)\r
#\r
args.OutputFile = open(args.OutputFileName, 'wb')\r
args.OutputFile.write(EFI_HASH_ALGORITHM_SHA256_GUID.get_bytes_le())\r
- args.OutputFile.write(PublicKey)\r
+ args.OutputFile.write(bytearray.fromhex(PublicKey))\r
args.OutputFile.write(Signature)\r
args.OutputFile.write(args.InputFileBuffer)\r
args.OutputFile.close()\r
#\r
# Verify the public key\r
#\r
- if Header.PublicKey != PublicKey:\r
+ if Header.PublicKey != bytearray.fromhex(PublicKey):\r
print('ERROR: Public key in input file does not match public key from private key file')\r
sys.exit(1)\r
\r
EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" %OutputFile, None)\r
\r
# Use a instance of BytesIO to cache data\r
- fStringIO = BytesIO('')\r
+ fStringIO = BytesIO()\r
\r
for Item in VfrUniOffsetList:\r
if (Item[0].find("Strings") != -1):\r
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r
#\r
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]\r
- UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]\r
- fStringIO.write(''.join(UniGuid))\r
+ fStringIO.write(bytes(UniGuid))\r
UniValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (UniValue)\r
else:\r
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r
#\r
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]\r
- VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]\r
- fStringIO.write(''.join(VfrGuid))\r
- type (Item[1])\r
+ fStringIO.write(bytes(VfrGuid))\r
VfrValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (VfrValue)\r
\r
# @param String: the source string\r
#\r
def StringArrayLength(String):\r
- if isinstance(String, unicode):\r
- return (len(String) + 1) * 2 + 1\r
- elif String.startswith('L"'):\r
+ if String.startswith('L"'):\r
return (len(String) - 3 + 1) * 2\r
elif String.startswith('"'):\r
return (len(String) - 2 + 1)\r
continue\r
\r
if FileContent[0] == 0xff or FileContent[0] == 0xfe:\r
- FileContent = unicode(FileContent, "utf-16")\r
+ FileContent = str(FileContent, "utf-16")\r
+ IncludedFileList = gIncludePattern.findall(FileContent)\r
+ else:\r
+ try:\r
+ FileContent = str(FileContent, "utf-8")\r
+ IncludedFileList = gIncludePattern.findall(FileContent)\r
+ except:\r
+                    FileContent = ""
IncludedFileList = gIncludePattern.findall(FileContent)\r
\r
for Inc in IncludedFileList:\r
except:\r
EdkLogger.error('Build', COMMAND_FAILURE, 'Can not execute command: %s' % Command)\r
Result = Process.communicate()\r
- return Process.returncode, Result[0], Result[1]\r
+ return Process.returncode, Result[0].decode(encoding='utf-8', errors='ignore'), Result[1].decode(encoding='utf-8', errors='ignore')\r
\r
@staticmethod\r
def IntToCString(Value, ValueSize):\r
Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName] = {}\r
Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName][DefaultStore] = DefaultValue\r
for pcd in Pcds.values():\r
- SkuInfoObj = pcd.SkuInfoList.values()[0]\r
+ SkuInfoObj = list(pcd.SkuInfoList.values())[0]\r
pcdDecObject = self._DecPcds[pcd.TokenCName, pcd.TokenSpaceGuidCName]\r
pcd.DatumType = pcdDecObject.DatumType\r
# Only fix the value while no value provided in DSC file.\r
return\r
\r
if self._include_flag:\r
- self._ValueList[1] = "<HeaderFiles>_" + md5(self._CurrentLine).hexdigest()\r
+ self._ValueList[1] = "<HeaderFiles>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()\r
self._ValueList[2] = self._CurrentLine\r
if self._package_flag and "}" != self._CurrentLine:\r
- self._ValueList[1] = "<Packages>_" + md5(self._CurrentLine).hexdigest()\r
+ self._ValueList[1] = "<Packages>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()\r
self._ValueList[2] = self._CurrentLine\r
if self._CurrentLine == "}":\r
self._package_flag = False\r
import subprocess\r
import threading\r
from datetime import datetime\r
-from io import BytesIO\r
+from io import StringIO\r
from Common import EdkLogger\r
from Common.Misc import SaveFileOnChange\r
from Common.Misc import GuidStructureByteArrayToGuidString\r
\r
Match = gTimeStampPattern.search(FileContents)\r
if Match:\r
- self.BuildTimeStamp = datetime.fromtimestamp(int(Match.group(1)))\r
+ self.BuildTimeStamp = datetime.utcfromtimestamp(int(Match.group(1)))\r
except IOError:\r
EdkLogger.warn(None, "Fail to read report file", FwReportFileName)\r
\r
# read one line a time\r
Line = From.readline()\r
# empty string means "end"\r
- if Line is not None and Line != "":\r
- To(Line.rstrip())\r
+ if Line is not None and Line != b"":\r
+ To(Line.rstrip().decode(encoding='utf-8', errors='ignore'))\r
else:\r
break\r
if ExitFlag.isSet():\r
def GenerateReport(self, BuildDuration, AutoGenTime, MakeTime, GenFdsTime):\r
if self.ReportFile:\r
try:\r
- File = BytesIO('')\r
+ File = StringIO('')\r
for (Wa, MaList) in self.ReportList:\r
PlatformReport(Wa, MaList, self.ReportType).GenerateReport(File, BuildDuration, AutoGenTime, MakeTime, GenFdsTime, self.ReportType)\r
Content = FileLinesSplit(File.getvalue(), gLineMaxLength)\r
#\r
import Common.LongFilePathOs as os\r
import re\r
-from io import BytesIO\r
+from io import StringIO\r
import sys\r
import glob\r
import time\r
# read one line a time\r
Line = From.readline()\r
# empty string means "end"\r
- if Line is not None and Line != "":\r
- To(Line.rstrip())\r
+ if Line is not None and Line != b"":\r
+ To(Line.rstrip().decode(encoding='utf-8', errors='ignore'))\r
else:\r
break\r
if ExitFlag.isSet():\r
if not Ma.IsLibrary:\r
ModuleList[Ma.Guid.upper()] = Ma\r
\r
- MapBuffer = BytesIO('')\r
+ MapBuffer = StringIO('')\r
if self.LoadFixAddress != 0:\r
#\r
# Rebase module to the preferred memory address before GenFds\r
if not Ma.IsLibrary:\r
ModuleList[Ma.Guid.upper()] = Ma\r
\r
- MapBuffer = BytesIO('')\r
+ MapBuffer = StringIO('')\r
if self.LoadFixAddress != 0:\r
#\r
# Rebase module to the preferred memory address before GenFds\r
#\r
# Rebase module to the preferred memory address before GenFds\r
#\r
- MapBuffer = BytesIO('')\r
+ MapBuffer = StringIO('')\r
if self.LoadFixAddress != 0:\r
self._CollectModuleMapBuffer(MapBuffer, ModuleList)\r
\r