for files in AllWorkSpaceMetaFiles:\r
if files.endswith('.dec'):\r
continue\r
- f = open(files, 'r')\r
+ f = open(files, 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
- SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), True)\r
+ SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), False)\r
GlobalData.gPlatformHash = m.hexdigest()\r
\r
#\r
HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')\r
m = hashlib.md5()\r
# Get .dec file's hash value\r
- f = open(Pkg.MetaFile.Path, 'r')\r
+ f = open(Pkg.MetaFile.Path, 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
for Root, Dirs, Files in os.walk(str(inc)):\r
for File in sorted(Files):\r
File_Path = os.path.join(Root, File)\r
- f = open(File_Path, 'r')\r
+ f = open(File_Path, 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
- SaveFileOnChange(HashFile, m.hexdigest(), True)\r
+ SaveFileOnChange(HashFile, m.hexdigest(), False)\r
GlobalData.gPackageHash[Pkg.Arch][Pkg.PackageName] = m.hexdigest()\r
\r
def _GetMetaFiles(self, Target, Toolchain, Arch):\r
for pcd in self._DynamicPcdList:\r
if len(pcd.SkuInfoList) == 1:\r
for (SkuName, SkuId) in allskuset:\r
- if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0:\r
+ if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:\r
continue\r
pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])\r
pcd.SkuInfoList[SkuName].SkuId = SkuId\r
ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)\r
ToolsDef += "\n"\r
\r
- SaveFileOnChange(self.ToolDefinitionFile, ToolsDef)\r
+ SaveFileOnChange(self.ToolDefinitionFile, ToolsDef, False)\r
for DllPath in DllPathList:\r
os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]\r
os.environ["MAKE_FLAGS"] = MakeFlags\r
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)\r
RetVal[AutoFile] = str(StringH)\r
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
- if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":\r
+ if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":\r
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)\r
RetVal[AutoFile] = UniStringBinBuffer.getvalue()\r
AutoFile.IsBinary = True\r
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)\r
RetVal[AutoFile] = str(StringIdf)\r
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
- if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":\r
+ if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":\r
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)\r
RetVal[AutoFile] = IdfGenBinBuffer.getvalue()\r
AutoFile.IsBinary = True\r
EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)\r
\r
# Use a instance of BytesIO to cache data\r
- fStringIO = BytesIO('')\r
+ fStringIO = BytesIO()\r
\r
for Item in VfrUniOffsetList:\r
if (Item[0].find("Strings") != -1):\r
# GUID + Offset\r
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r
#\r
- UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]\r
- UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]\r
- fStringIO.write(''.join(UniGuid))\r
+ UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'\r
+ fStringIO.write(UniGuid)\r
UniValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (UniValue)\r
else:\r
# GUID + Offset\r
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r
#\r
- VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]\r
- VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]\r
- fStringIO.write(''.join(VfrGuid))\r
+ VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'\r
+ fStringIO.write(VfrGuid)\r
VfrValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (VfrValue)\r
#\r
GlobalData.gModuleHash[self.Arch] = {}\r
m = hashlib.md5()\r
# Add Platform level hash\r
- m.update(GlobalData.gPlatformHash)\r
+ m.update(GlobalData.gPlatformHash.encode('utf-8'))\r
# Add Package level hash\r
if self.DependentPackageList:\r
for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:\r
- m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName])\r
+ m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName].encode('utf-8'))\r
\r
# Add Library hash\r
if self.LibraryAutoGenList:\r
for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r
Lib.GenModuleHash()\r
- m.update(GlobalData.gModuleHash[self.Arch][Lib.Name])\r
+ m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
\r
# Add Module self\r
- f = open(str(self.MetaFile), 'r')\r
+ f = open(str(self.MetaFile), 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
# Add Module's source files\r
if self.SourceFileList:\r
for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r
- f = open(str(File), 'r')\r
+ f = open(str(File), 'rb')\r
Content = f.read()\r
f.close()\r
m.update(Content)\r
if GlobalData.gBinCacheSource:\r
if self.AttemptModuleCacheCopy():\r
return False\r
- return SaveFileOnChange(ModuleHashFile, m.hexdigest(), True)\r
+ return SaveFileOnChange(ModuleHashFile, m.hexdigest(), False)\r
\r
## Decide whether we can skip the ModuleAutoGen process\r
def CanSkipbyHash(self):\r
TempBuffer += Buffer\r
elif File.Ext.upper() == '.JPG':\r
ImageType, = struct.unpack('4s', Buffer[6:10])\r
- if ImageType != 'JFIF':\r
+ if ImageType != b'JFIF':\r
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)\r
TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)\r
TempBuffer += pack('I', len(Buffer))\r
\r
def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):\r
ImageType, = struct.unpack('2s', Buffer[0:2])\r
- if ImageType!= 'BM': # BMP file type is 'BM'\r
+ if ImageType!= b'BM': # BMP file type is 'BM'\r
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)\r
BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])\r
BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')\r
for Index in range(0, len(PaletteBuffer)):\r
if Index % 4 == 3:\r
continue\r
- PaletteTemp += PaletteBuffer[Index]\r
+ PaletteTemp += PaletteBuffer[Index:Index+1]\r
PaletteBuffer = PaletteTemp[1:]\r
return ImageBuffer, PaletteBuffer\r
\r
CurrentFileDependencyList = DepDb[F]\r
else:\r
try:\r
- Fd = open(F.Path, 'r')\r
+ Fd = open(F.Path, 'rb')\r
+ FileContent = Fd.read()\r
+ Fd.close()\r
except BaseException as X:\r
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))\r
-\r
- FileContent = Fd.read()\r
- Fd.close()\r
if len(FileContent) == 0:\r
continue\r
\r
if FileContent[0] == 0xff or FileContent[0] == 0xfe:\r
- FileContent = unicode(FileContent, "utf-16")\r
+ FileContent = FileContent.decode('utf-16')\r
+ else:\r
+ try:\r
+ FileContent = str(FileContent)\r
+ except:\r
+ pass\r
IncludedFileList = gIncludePattern.findall(FileContent)\r
\r
for Inc in IncludedFileList:\r
\r
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]\r
\r
- Buffer = ''\r
+ Buffer = bytearray()\r
for Datas in self.RawDataList:\r
if type(Datas) in (list, tuple):\r
for Data in Datas:\r
DbItemList.__init__(self, ItemSize, DataList, RawDataList)\r
\r
def PackData(self):\r
- Buffer = ''\r
+ Buffer = bytearray()\r
PackStr = "=LHH"\r
for Datas in self.RawDataList:\r
Buffer += pack(PackStr,\r
def PackData(self):\r
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]\r
\r
- Buffer = ''\r
+ Buffer = bytearray()\r
for DataList in self.RawDataList:\r
for Data in DataList:\r
if type(Data) in (list, tuple):\r
\r
def PackData(self):\r
PackStr = "=LLHHLHH"\r
- Buffer = ''\r
+ Buffer = bytearray()\r
for DataList in self.RawDataList:\r
for Data in DataList:\r
Buffer += pack(PackStr,\r
\r
def PackData(self):\r
PackStr = "=LL"\r
- Buffer = ''\r
+ Buffer = bytearray()\r
for Data in self.RawDataList:\r
Buffer += pack(PackStr,\r
GetIntegerValue(Data[0]),\r
return length * self.ItemSize\r
def PackData(self):\r
PackStr = "=H"\r
- Buffer = ''\r
+ Buffer = bytearray()\r
for Data in self.RawDataList:\r
Buffer += pack(PackStr,\r
GetIntegerValue(Data[0]))\r
Index = 0\r
for Item in DbItemTotal:\r
Index +=1\r
- b = Item.PackData()\r
- Buffer += b\r
+ packdata = Item.PackData()\r
+ for i in range(len(packdata)):\r
+ Buffer += packdata[i:i + 1]\r
if Index == InitTableNum:\r
if len(Buffer) % 8:\r
for num in range(8 - len(Buffer) % 8):\r
totallenbuff = pack("=L", totallen)\r
newbuffer = databasebuff[:32]\r
for i in range(4):\r
- newbuffer += totallenbuff[i]\r
+ newbuffer += totallenbuff[i:i+1]\r
for i in range(36, totallen):\r
- newbuffer += databasebuff[i]\r
+ newbuffer += databasebuff[i:i+1]\r
\r
return newbuffer\r
\r
for skuname, skuid in DynamicPcdSet_Sku:\r
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)\r
final_data = ()\r
- for item in PcdDbBuffer:\r
- final_data += unpack("B", item)\r
+ for item in range(len(PcdDbBuffer)):\r
+ final_data += unpack("B", PcdDbBuffer[item:item+1])\r
PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)\r
PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)\r
VarCheckTableData[(skuname, skuid)] = VarCheckTab\r
else:\r
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)\r
final_data = ()\r
- for item in PcdDbBuffer:\r
- final_data += unpack("B", item)\r
+ for item in range(len(PcdDbBuffer)):\r
+ final_data += unpack("B", PcdDbBuffer[item:item + 1])\r
PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)\r
\r
return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)\r
fisrtdata_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[firstdata_type]]\r
fisrtdata = fisrtvalue_list[0]\r
fisrtvalue_list = []\r
- for data_byte in pack(fisrtdata_flag, int(fisrtdata, 16) if fisrtdata.upper().startswith('0X') else int(fisrtdata)):\r
- fisrtvalue_list.append(hex(unpack("B", data_byte)[0]))\r
+ pack_data = pack(fisrtdata_flag, int(fisrtdata, 0))\r
+ for data_byte in range(len(pack_data)):\r
+ fisrtvalue_list.append(hex(unpack("B", pack_data[data_byte:data_byte + 1])[0]))\r
newvalue_list = ["0x00"] * FirstOffset + fisrtvalue_list\r
\r
for var_item in sku_var_info_offset_list[1:]:\r
data_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[Curdata_type]]\r
data = CurvalueList[0]\r
CurvalueList = []\r
- for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):\r
- CurvalueList.append(hex(unpack("B", data_byte)[0]))\r
+ pack_data = pack(data_flag, int(data, 0))\r
+ for data_byte in range(len(pack_data)):\r
+ CurvalueList.append(hex(unpack("B", pack_data[data_byte:data_byte + 1])[0]))\r
if CurOffset > len(newvalue_list):\r
newvalue_list = newvalue_list + ["0x00"] * (CurOffset - len(newvalue_list)) + CurvalueList\r
else:\r
default_data_buffer = VariableMgr.PACK_VARIABLES_DATA(default_sku_default.default_value, default_sku_default.data_type, tail)\r
\r
default_data_array = ()\r
- for item in default_data_buffer:\r
- default_data_array += unpack("B", item)\r
+ for item in range(len(default_data_buffer)):\r
+ default_data_array += unpack("B", default_data_buffer[item:item + 1])\r
\r
var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])\r
\r
others_data_buffer = VariableMgr.PACK_VARIABLES_DATA(other_sku_other.default_value, other_sku_other.data_type, tail)\r
\r
others_data_array = ()\r
- for item in others_data_buffer:\r
- others_data_array += unpack("B", item)\r
+ for item in range(len(others_data_buffer)):\r
+ others_data_array += unpack("B", others_data_buffer[item:item + 1])\r
\r
data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)\r
\r
return []\r
\r
pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})\r
- NvStoreDataBuffer = ""\r
+ NvStoreDataBuffer = bytearray()\r
var_data_offset = collections.OrderedDict()\r
offset = NvStorageHeaderSize\r
for default_data, default_info in pcds_default_data.values():\r
\r
nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8)\r
\r
- data_delta_structure_buffer = ""\r
+ data_delta_structure_buffer = bytearray()\r
for skuname, defaultstore in var_data:\r
if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):\r
continue\r
@staticmethod\r
def unpack_data(data):\r
final_data = ()\r
- for item in data:\r
- final_data += unpack("B", item)\r
+ for item in range(len(data)):\r
+ final_data += unpack("B", data[item:item + 1])\r
return final_data\r
\r
@staticmethod\r
\r
@staticmethod\r
def PACK_VARIABLES_DATA(var_value,data_type, tail = None):\r
- Buffer = ""\r
+ Buffer = bytearray()\r
data_len = 0\r
if data_type == DataType.TAB_VOID:\r
for value_char in var_value.strip("{").strip("}").split(","):\r
\r
@staticmethod\r
def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):\r
- Buffer = ""\r
+ Buffer = bytearray()\r
Buffer += pack("=L", 4+8+8)\r
Buffer += pack("=Q", int(skuid))\r
Buffer += pack("=Q", int(defaultstoragename))\r
def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):\r
skuid = self.GetSkuId(skuname)\r
defaultstorageid = self.GetDefaultStoreId(defaultstoragename)\r
- Buffer = ""\r
+ Buffer = bytearray()\r
Buffer += pack("=L", 4+8+8)\r
Buffer += pack("=Q", int(skuid))\r
Buffer += pack("=Q", int(defaultstorageid))\r
\r
@staticmethod\r
def PACK_VARIABLE_NAME(var_name):\r
- Buffer = ""\r
+ Buffer = bytearray()\r
for name_char in var_name.strip("{").strip("}").split(","):\r
Buffer += pack("=B", int(name_char, 16))\r
\r
SectionData = []\r
\r
try:\r
- FileLinesList = open(self._FilePath, "r", 0).readlines()\r
+ FileLinesList = open(self._FilePath, "r").readlines()\r
except BaseException:\r
EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % self._FilePath)\r
\r
# @retval: A list for formatted hex string\r
#\r
def AscToHexList(Ascii):\r
- return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]\r
+ try:\r
+ return ['0x{0:02X}'.format(Item) for Item in Ascii]\r
+ except:\r
+ return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]\r
\r
## Create content of .h file\r
#\r
from Common.BuildToolError import *\r
from Common.StringUtils import GetLineNo\r
from Common.Misc import PathClass\r
-from Common.LongFilePathSupport import LongFilePath, UniToStr\r
+from Common.LongFilePathSupport import LongFilePath\r
from Common.GlobalData import *\r
##\r
# Static definitions\r
if EndPos != -1 and EndPos - StartPos == 6 :\r
if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE):\r
EndStr = Line[EndPos: ]\r
- UniStr = ('\u' + (Line[StartPos + 2 : EndPos])).decode('unicode_escape')\r
+ UniStr = Line[StartPos + 2: EndPos]\r
if EndStr.startswith(u'\\x') and len(EndStr) >= 7:\r
if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE):\r
Line = Line[0 : StartPos] + UniStr + EndStr\r
os.mkdir(dest)\r
BinFileName = "PcdVarCheck.bin"\r
BinFilePath = os.path.join(dest, BinFileName)\r
- Buffer = ''\r
+ Buffer = bytearray()\r
index = 0\r
for var_check_tab in self.var_check_info:\r
index += 1\r
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,\r
"PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))\r
try:\r
- self.PcdValue = pack('%ds' % Size, ValueString)\r
+ self.PcdValue = pack('%ds' % Size, ValueString.encode('utf-8'))\r
except:\r
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,\r
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))\r
self.PcdFixedOffsetSizeList = []\r
self.PcdUnknownOffsetList = []\r
try:\r
- fInputfile = open(InputFileName, "r", 0)\r
+ fInputfile = open(InputFileName, "r")\r
try:\r
self.FileLinesList = fInputfile.readlines()\r
except:\r
#Open an VPD file to process\r
\r
try:\r
- fVpdFile = open(BinFileName, "wb", 0)\r
+ fVpdFile = open(BinFileName, "wb")\r
except:\r
# Open failed\r
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)\r
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)\r
\r
# Use a instance of BytesIO to cache data\r
- fStringIO = BytesIO('')\r
+ fStringIO = BytesIO()\r
\r
# Write the header of map file.\r
try :\r
import os\r
from . import LongFilePathOsPath\r
from Common.LongFilePathSupport import LongFilePath\r
-from Common.LongFilePathSupport import UniToStr\r
import time\r
\r
path = LongFilePathOsPath\r
List = []\r
uList = os.listdir(u"%s" % LongFilePath(path))\r
for Item in uList:\r
- List.append(UniToStr(Item))\r
+ List.append(Item)\r
return List\r
\r
environ = os.environ\r
with open(LongFilePath(src), 'rb') as fsrc:\r
with open(LongFilePath(dst), 'wb') as fdst:\r
shutil.copyfileobj(fsrc, fdst)\r
-\r
-## Convert a python unicode string to a normal string\r
-#\r
-# Convert a python unicode string to a normal string\r
-# UniToStr(u'I am a string') is 'I am a string'\r
-#\r
-# @param Uni: The python unicode string\r
-#\r
-# @retval: The formatted normal string\r
-#\r
-def UniToStr(Uni):\r
- return repr(Uni)[2:-1]\r
# @retval False If the file content is the same\r
#\r
def SaveFileOnChange(File, Content, IsBinaryFile=True):\r
- if not IsBinaryFile:\r
- Content = Content.replace("\n", os.linesep)\r
\r
if os.path.exists(File):\r
- try:\r
- if Content == open(File, "rb").read():\r
- return False\r
- except:\r
- EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File)\r
+ if IsBinaryFile:\r
+ try:\r
+ with open(File, "rb") as f:\r
+ if Content == f.read():\r
+ return False\r
+ except:\r
+ EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File)\r
+ else:\r
+ try:\r
+ with open(File, "r") as f:\r
+ if Content == f.read():\r
+ return False\r
+ except:\r
+ EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File)\r
\r
DirName = os.path.dirname(File)\r
if not CreateDirectory(DirName):\r
if not os.access(DirName, os.W_OK):\r
EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)\r
\r
- try:\r
- Fd = open(File, "wb")\r
- Fd.write(Content)\r
- Fd.close()\r
- except IOError as X:\r
- EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)\r
+ if IsBinaryFile:\r
+ try:\r
+ with open(File, "wb") as Fd:\r
+ Fd.write(Content)\r
+ except IOError as X:\r
+ EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)\r
+ else:\r
+ try:\r
+ with open(File, 'w') as Fd:\r
+ Fd.write(Content)\r
+ except IOError as X:\r
+ EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)\r
\r
return True\r
\r
if Value[0] == '"' and Value[-1] == '"':\r
Value = Value[1:-1]\r
try:\r
- Value = "'" + uuid.UUID(Value).bytes_le + "'"\r
+ Value = str(uuid.UUID(Value).bytes_le)\r
+ if Value.startswith("b'"):\r
+ Value = Value[2:-1]\r
+ Value = "'" + Value + "'"\r
except ValueError as Message:\r
raise BadExpression(Message)\r
Value, Size = ParseFieldValue(Value)\r
ByteArray = array.array('B')\r
ByteArray.fromfile(PeObject, 4)\r
# PE signature should be 'PE\0\0'\r
- if ByteArray.tostring() != 'PE\0\0':\r
+ if ByteArray.tostring() != b'PE\0\0':\r
self.ErrorInfo = self.FileName + ' has no valid PE signature PE00'\r
return\r
\r
# @retval Value The integer value that the input represents\r
#\r
def GetIntegerValue(Input):\r
- if type(Input) in (int, long):\r
+ if not isinstance(Input, str):\r
return Input\r
String = Input\r
if String.endswith("U"):\r
return List\r
\r
def StringToArray(String):\r
- if isinstance(String, unicode):\r
- if len(unicode) == 0:\r
- return "{0x00,0x00}"\r
- return "{%s,0x00,0x00}" % ",".join("0x%02x,0x00" % ord(C) for C in String)\r
- elif String.startswith('L"'):\r
+ if String.startswith('L"'):\r
if String == "L\"\"":\r
return "{0x00,0x00}"\r
else:\r
return '{%s,0,0}' % ','.join(String.split())\r
\r
def StringArrayLength(String):\r
- if isinstance(String, unicode):\r
- return (len(String) + 1) * 2 + 1;\r
- elif String.startswith('L"'):\r
+ if String.startswith('L"'):\r
return (len(String) - 3 + 1) * 2\r
elif String.startswith('"'):\r
return (len(String) - 2 + 1)\r
if (Vpd is None):\r
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")\r
\r
- if not (Offset >= 0 or Offset == TAB_STAR):\r
+ if not (Offset >= "0" or Offset == TAB_STAR):\r
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)\r
\r
if Vpd.DatumType == TAB_VOID:\r
- if Vpd.MaxDatumSize <= 0:\r
+ if Vpd.MaxDatumSize <= "0":\r
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,\r
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))\r
elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:\r
if not Vpd.MaxDatumSize:\r
Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType]\r
else:\r
- if Vpd.MaxDatumSize <= 0:\r
+ if Vpd.MaxDatumSize <= "0":\r
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,\r
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))\r
\r
"Invalid parameter FilePath: %s." % FilePath)\r
\r
Content = FILE_COMMENT_TEMPLATE\r
- Pcds = sorted(self._VpdArray.keys())\r
+ Pcds = sorted(self._VpdArray.keys(), key=lambda x: x.TokenCName)\r
for Pcd in Pcds:\r
i = 0\r
PcdTokenCName = Pcd.TokenCName\r
except Exception as X:\r
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData=str(X))\r
(out, error) = PopenObject.communicate()\r
- print(out)\r
+ print(out.decode(encoding='utf-8', errors='ignore'))\r
while PopenObject.returncode is None :\r
PopenObject.wait()\r
\r
# @retval string Generated file name\r
#\r
def GenFfs (self, FvName, Dict = {}, IsMakefile = False):\r
- Buffer = BytesIO('')\r
+ Buffer = BytesIO()\r
if self.AprioriType == "PEI":\r
AprioriFileGuid = PEI_APRIORI_GUID\r
else:\r
#\r
# The real capsule header structure is 28 bytes\r
#\r
- Header.write('\x00'*(HdrSize-28))\r
+ Header.write(b'\x00'*(HdrSize-28))\r
Header.write(FwMgrHdr.getvalue())\r
Header.write(Content.getvalue())\r
#\r
return self.GenFmpCapsule()\r
\r
CapInfFile = self.GenCapInf()\r
- CapInfFile.writelines("[files]" + TAB_LINE_BREAK)\r
+ CapInfFile.append("[files]" + TAB_LINE_BREAK)\r
CapFileList = []\r
for CapsuleDataObj in self.CapsuleDataList:\r
CapsuleDataObj.CapsuleName = self.CapsuleName\r
FileName = CapsuleDataObj.GenCapsuleSubItem()\r
CapsuleDataObj.CapsuleName = None\r
CapFileList.append(FileName)\r
- CapInfFile.writelines("EFI_FILE_NAME = " + \\r
+ CapInfFile.append("EFI_FILE_NAME = " + \\r
FileName + \\r
TAB_LINE_BREAK)\r
- SaveFileOnChange(self.CapInfFileName, CapInfFile.getvalue(), False)\r
- CapInfFile.close()\r
+ SaveFileOnChange(self.CapInfFileName, ''.join(CapInfFile), False)\r
#\r
# Call GenFv tool to generate capsule\r
#\r
def GenCapInf(self):\r
self.CapInfFileName = os.path.join(GenFdsGlobalVariable.FvDir,\r
self.UiCapsuleName + "_Cap" + '.inf')\r
- CapInfFile = BytesIO() #open (self.CapInfFileName , 'w+')\r
+ CapInfFile = []\r
\r
- CapInfFile.writelines("[options]" + TAB_LINE_BREAK)\r
+ CapInfFile.append("[options]" + TAB_LINE_BREAK)\r
\r
for Item in self.TokensDict:\r
- CapInfFile.writelines("EFI_" + \\r
+ CapInfFile.append("EFI_" + \\r
Item + \\r
' = ' + \\r
self.TokensDict[Item] + \\r
if self.FvName.find('.fv') == -1:\r
if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:\r
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[self.FvName.upper()]\r
- FdBuffer = BytesIO('')\r
+ FdBuffer = BytesIO()\r
FvObj.CapsuleName = self.CapsuleName\r
FvFile = FvObj.AddToBuffer(FdBuffer)\r
FvObj.CapsuleName = None\r
HasCapsuleRegion = True\r
break\r
if HasCapsuleRegion:\r
- TempFdBuffer = BytesIO('')\r
+ TempFdBuffer = BytesIO()\r
PreviousRegionStart = -1\r
PreviousRegionSize = 1\r
\r
GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')\r
RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)\r
\r
- FdBuffer = BytesIO('')\r
+ FdBuffer = BytesIO()\r
PreviousRegionStart = -1\r
PreviousRegionSize = 1\r
for RegionObj in self.RegionList :\r
self.FileName = FileName\r
self.FileLinesList = []\r
try:\r
- with open(FileName, "rb", 0) as fsock:\r
+ with open(FileName, "r") as fsock:\r
self.FileLinesList = fsock.readlines()\r
for index, line in enumerate(self.FileLinesList):\r
if not line.endswith(TAB_LINE_BREAK):\r
def __init__(self, FileName):\r
self.FileLinesList = []\r
try:\r
- with open(FileName, "rb", 0) as fsock:\r
+ with open(FileName, "r") as fsock:\r
self.FileLinesList = fsock.readlines()\r
\r
except:\r
Dict.update(self.DefineVarDict)\r
SectionAlignments = None\r
if self.FvName:\r
- Buffer = BytesIO('')\r
+ Buffer = BytesIO()\r
if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict:\r
EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName))\r
Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper())\r
elif self.FileName:\r
if hasattr(self, 'FvFileType') and self.FvFileType == 'RAW':\r
if isinstance(self.FileName, list) and isinstance(self.SubAlignment, list) and len(self.FileName) == len(self.SubAlignment):\r
- FileContent = ''\r
+ FileContent = BytesIO()\r
MaxAlignIndex = 0\r
MaxAlignValue = 1\r
for Index, File in enumerate(self.FileName):\r
if AlignValue > MaxAlignValue:\r
MaxAlignIndex = Index\r
MaxAlignValue = AlignValue\r
- FileContent += Content\r
- if len(FileContent) % AlignValue != 0:\r
- Size = AlignValue - len(FileContent) % AlignValue\r
+ FileContent.write(Content)\r
+ if len(FileContent.getvalue()) % AlignValue != 0:\r
+ Size = AlignValue - len(FileContent.getvalue()) % AlignValue\r
for i in range(0, Size):\r
- FileContent += pack('B', 0xFF)\r
+ FileContent.write(pack('B', 0xFF))\r
\r
- if FileContent:\r
+ if FileContent.getvalue() != b'':\r
OutputRAWFile = os.path.join(GenFdsGlobalVariable.FfsDir, self.NameGuid, self.NameGuid + '.raw')\r
- SaveFileOnChange(OutputRAWFile, FileContent, True)\r
+ SaveFileOnChange(OutputRAWFile, FileContent.getvalue(), True)\r
self.FileName = OutputRAWFile\r
self.SubAlignment = self.SubAlignment[MaxAlignIndex]\r
\r
def __GenUniVfrOffsetFile(VfrUniOffsetList, UniVfrOffsetFileName):\r
\r
# Use a instance of StringIO to cache data\r
- fStringIO = BytesIO('')\r
+ fStringIO = BytesIO()\r
\r
for Item in VfrUniOffsetList:\r
if (Item[0].find("Strings") != -1):\r
# GUID + Offset\r
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r
#\r
- UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]\r
- UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]\r
- fStringIO.write(''.join(UniGuid))\r
+ UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'\r
+ fStringIO.write(UniGuid)\r
UniValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (UniValue)\r
else:\r
# GUID + Offset\r
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r
#\r
- VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]\r
- VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]\r
- fStringIO.write(''.join(VfrGuid))\r
+ VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'\r
+ fStringIO.write(VfrGuid)\r
type (Item[1])\r
VfrValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (VfrValue)\r
FfsFileList.append(FileName)\r
# Add Apriori file name to Inf file\r
if not Flag:\r
- self.FvInfFile.writelines("EFI_FILE_NAME = " + \\r
+ self.FvInfFile.append("EFI_FILE_NAME = " + \\r
FileName + \\r
TAB_LINE_BREAK)\r
\r
FileName = FfsFile.GenFfs(MacroDict, FvParentAddr=BaseAddress, IsMakefile=Flag, FvName=self.UiFvName)\r
FfsFileList.append(FileName)\r
if not Flag:\r
- self.FvInfFile.writelines("EFI_FILE_NAME = " + \\r
+ self.FvInfFile.append("EFI_FILE_NAME = " + \\r
FileName + \\r
TAB_LINE_BREAK)\r
if not Flag:\r
- SaveFileOnChange(self.InfFileName, self.FvInfFile.getvalue(), False)\r
- self.FvInfFile.close()\r
+ FvInfFile = ''.join(self.FvInfFile)\r
+ SaveFileOnChange(self.InfFileName, FvInfFile, False)\r
#\r
# Call GenFv tool\r
#\r
# PI FvHeader is 0x48 byte\r
FvHeaderBuffer = FvFileObj.read(0x48)\r
Signature = FvHeaderBuffer[0x28:0x32]\r
- if Signature and Signature.startswith('_FVH'):\r
+ if Signature and Signature.startswith(b'_FVH'):\r
GenFdsGlobalVariable.VerboseLogger("\nGenerate %s FV Successfully" % self.UiFvName)\r
GenFdsGlobalVariable.SharpCounter = 0\r
\r
FvFileObj.seek(0)\r
Buffer.write(FvFileObj.read())\r
# FV alignment position.\r
- FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E]) & 0x1F)\r
+ FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E:0x2F]) & 0x1F)\r
if FvAlignmentValue >= 0x400:\r
if FvAlignmentValue >= 0x100000:\r
if FvAlignmentValue >= 0x1000000:\r
#\r
self.InfFileName = os.path.join(GenFdsGlobalVariable.FvDir,\r
self.UiFvName + '.inf')\r
- self.FvInfFile = BytesIO()\r
+ self.FvInfFile = []\r
\r
#\r
# Add [Options]\r
#\r
- self.FvInfFile.writelines("[options]" + TAB_LINE_BREAK)\r
+ self.FvInfFile.append("[options]" + TAB_LINE_BREAK)\r
if BaseAddress is not None:\r
- self.FvInfFile.writelines("EFI_BASE_ADDRESS = " + \\r
+ self.FvInfFile.append("EFI_BASE_ADDRESS = " + \\r
BaseAddress + \\r
TAB_LINE_BREAK)\r
\r
if BlockSize is not None:\r
- self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \\r
+ self.FvInfFile.append("EFI_BLOCK_SIZE = " + \\r
'0x%X' %BlockSize + \\r
TAB_LINE_BREAK)\r
if BlockNum is not None:\r
- self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \\r
+ self.FvInfFile.append("EFI_NUM_BLOCKS = " + \\r
' 0x%X' %BlockNum + \\r
TAB_LINE_BREAK)\r
else:\r
if self.BlockSizeList == []:\r
if not self._GetBlockSize():\r
#set default block size is 1\r
- self.FvInfFile.writelines("EFI_BLOCK_SIZE = 0x1" + TAB_LINE_BREAK)\r
+ self.FvInfFile.append("EFI_BLOCK_SIZE = 0x1" + TAB_LINE_BREAK)\r
\r
for BlockSize in self.BlockSizeList:\r
if BlockSize[0] is not None:\r
- self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \\r
+ self.FvInfFile.append("EFI_BLOCK_SIZE = " + \\r
'0x%X' %BlockSize[0] + \\r
TAB_LINE_BREAK)\r
\r
if BlockSize[1] is not None:\r
- self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \\r
+ self.FvInfFile.append("EFI_NUM_BLOCKS = " + \\r
' 0x%X' %BlockSize[1] + \\r
TAB_LINE_BREAK)\r
\r
if self.BsBaseAddress is not None:\r
- self.FvInfFile.writelines('EFI_BOOT_DRIVER_BASE_ADDRESS = ' + \\r
+ self.FvInfFile.append('EFI_BOOT_DRIVER_BASE_ADDRESS = ' + \\r
'0x%X' %self.BsBaseAddress)\r
if self.RtBaseAddress is not None:\r
- self.FvInfFile.writelines('EFI_RUNTIME_DRIVER_BASE_ADDRESS = ' + \\r
+ self.FvInfFile.append('EFI_RUNTIME_DRIVER_BASE_ADDRESS = ' + \\r
'0x%X' %self.RtBaseAddress)\r
#\r
# Add attribute\r
#\r
- self.FvInfFile.writelines("[attributes]" + TAB_LINE_BREAK)\r
+ self.FvInfFile.append("[attributes]" + TAB_LINE_BREAK)\r
\r
- self.FvInfFile.writelines("EFI_ERASE_POLARITY = " + \\r
+ self.FvInfFile.append("EFI_ERASE_POLARITY = " + \\r
' %s' %ErasePloarity + \\r
TAB_LINE_BREAK)\r
if not (self.FvAttributeDict is None):\r
if self.FvAttributeDict[FvAttribute].upper() in ('TRUE', '1'):\r
self.UsedSizeEnable = True\r
continue\r
- self.FvInfFile.writelines("EFI_" + \\r
+ self.FvInfFile.append("EFI_" + \\r
FvAttribute + \\r
' = ' + \\r
self.FvAttributeDict[FvAttribute] + \\r
TAB_LINE_BREAK )\r
if self.FvAlignment is not None:\r
- self.FvInfFile.writelines("EFI_FVB2_ALIGNMENT_" + \\r
+ self.FvInfFile.append("EFI_FVB2_ALIGNMENT_" + \\r
self.FvAlignment.strip() + \\r
" = TRUE" + \\r
TAB_LINE_BREAK)\r
GenFdsGlobalVariable.ErrorLogger("FV Extension Header Entries declared for %s with no FvNameGuid declaration." % (self.UiFvName))\r
else:\r
TotalSize = 16 + 4\r
- Buffer = ''\r
+ Buffer = bytearray()\r
if self.UsedSizeEnable:\r
TotalSize += (4 + 4)\r
## define EFI_FV_EXT_TYPE_USED_SIZE_TYPE 0x03\r
#\r
Buffer += (pack('HH', (FvUiLen + 16 + 4), 0x0002)\r
+ PackGUID(Guid)\r
- + self.UiFvName)\r
+ + self.UiFvName.encode('utf-8'))\r
\r
for Index in range (0, len(self.FvExtEntryType)):\r
if self.FvExtEntryType[Index] == 'FILE':\r
if Changed:\r
if os.path.exists (self.InfFileName):\r
os.remove (self.InfFileName)\r
- self.FvInfFile.writelines("EFI_FV_EXT_HEADER_FILE_NAME = " + \\r
+ self.FvInfFile.append("EFI_FV_EXT_HEADER_FILE_NAME = " + \\r
FvExtHeaderFileName + \\r
TAB_LINE_BREAK)\r
\r
#\r
# Add [Files]\r
#\r
- self.FvInfFile.writelines("[files]" + TAB_LINE_BREAK)\r
+ self.FvInfFile.append("[files]" + TAB_LINE_BREAK)\r
# Generate Fv\r
#\r
if self.FvName is not None:\r
- Buffer = BytesIO('')\r
+ Buffer = BytesIO()\r
Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName)\r
if Fv is not None:\r
self.Fv = Fv\r
return\r
elif GenFds.OnlyGenerateThisFv is None:\r
for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict.values():\r
- Buffer = BytesIO('')\r
+ Buffer = BytesIO()\r
FvObj.AddToBuffer(Buffer)\r
Buffer.close()\r
\r
@staticmethod\r
def GenerateGuidXRefFile(BuildDb, ArchList, FdfParserObj):\r
GuidXRefFileName = os.path.join(GenFdsGlobalVariable.FvDir, "Guid.xref")\r
- GuidXRefFile = BytesIO('')\r
+ GuidXRefFile = []\r
PkgGuidDict = {}\r
GuidDict = {}\r
ModuleList = []\r
else:\r
ModuleList.append(Module)\r
if GlobalData.gGuidPattern.match(ModuleFile.BaseName):\r
- GuidXRefFile.write("%s %s\n" % (ModuleFile.BaseName, Module.BaseName))\r
+ GuidXRefFile.append("%s %s\n" % (ModuleFile.BaseName, Module.BaseName))\r
else:\r
- GuidXRefFile.write("%s %s\n" % (Module.Guid, Module.BaseName))\r
+ GuidXRefFile.append("%s %s\n" % (Module.Guid, Module.BaseName))\r
GuidDict.update(Module.Protocols)\r
GuidDict.update(Module.Guids)\r
GuidDict.update(Module.Ppis)\r
continue\r
else:\r
ModuleList.append(FdfModule)\r
- GuidXRefFile.write("%s %s\n" % (FdfModule.Guid, FdfModule.BaseName))\r
+ GuidXRefFile.append("%s %s\n" % (FdfModule.Guid, FdfModule.BaseName))\r
GuidDict.update(FdfModule.Protocols)\r
GuidDict.update(FdfModule.Guids)\r
GuidDict.update(FdfModule.Ppis)\r
continue\r
\r
Name = ' '.join(Name) if isinstance(Name, type([])) else Name\r
- GuidXRefFile.write("%s %s\n" %(FileStatementGuid, Name))\r
+ GuidXRefFile.append("%s %s\n" %(FileStatementGuid, Name))\r
\r
# Append GUIDs, Protocols, and PPIs to the Xref file\r
- GuidXRefFile.write("\n")\r
+ GuidXRefFile.append("\n")\r
for key, item in GuidDict.items():\r
- GuidXRefFile.write("%s %s\n" % (GuidStructureStringToGuidString(item).upper(), key))\r
+ GuidXRefFile.append("%s %s\n" % (GuidStructureStringToGuidString(item).upper(), key))\r
\r
- if GuidXRefFile.getvalue():\r
- SaveFileOnChange(GuidXRefFileName, GuidXRefFile.getvalue(), False)\r
+ if GuidXRefFile:\r
+ GuidXRefFile = ''.join(GuidXRefFile)\r
+ SaveFileOnChange(GuidXRefFileName, GuidXRefFile, False)\r
GenFdsGlobalVariable.InfLogger("\nGUID cross reference file can be found at %s" % GuidXRefFileName)\r
elif os.path.exists(GuidXRefFileName):\r
os.remove(GuidXRefFileName)\r
- GuidXRefFile.close()\r
\r
\r
if __name__ == '__main__':\r
return\r
if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1:\r
GenFdsGlobalVariable.InfLogger ("Return Value = %d" % PopenObject.returncode)\r
- GenFdsGlobalVariable.InfLogger (out)\r
- GenFdsGlobalVariable.InfLogger (error)\r
+ GenFdsGlobalVariable.InfLogger(out.decode(encoding='utf-8', errors='ignore'))\r
+ GenFdsGlobalVariable.InfLogger(error.decode(encoding='utf-8', errors='ignore'))\r
if PopenObject.returncode != 0:\r
print("###", cmd)\r
EdkLogger.error("GenFds", COMMAND_FAILURE, errorMess)\r
PadByte = pack('B', 0xFF)\r
else:\r
PadByte = pack('B', 0)\r
- PadData = ''.join(PadByte for i in range(0, Size))\r
- Buffer.write(PadData)\r
+ Buffer.write(PadByte * Size)
\r
## AddToBuffer()\r
#\r
if self.FvAddress % FvAlignValue != 0:\r
EdkLogger.error("GenFds", GENFDS_ERROR,\r
"FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))\r
- FvBuffer = BytesIO('')\r
+ FvBuffer = BytesIO()\r
FvBaseAddress = '0x%X' % self.FvAddress\r
BlockSize = None\r
BlockNum = None\r
if Process.returncode != 0:\r
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')\r
sys.exit(Process.returncode)\r
- print(Version[0])\r
+ print(Version[0].decode())\r
\r
#\r
# Read input file into a buffer and save input filename\r
if Process.returncode != 0:\r
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')\r
sys.exit(Process.returncode)\r
- print(Version[0])\r
+ print(Version[0].decode())\r
\r
args.PemFileName = []\r
\r
args.PemFileName.append(Item.name)\r
Item.close()\r
\r
- PublicKeyHash = ''\r
+ PublicKeyHash = bytearray()\r
for Item in args.PemFileName:\r
#\r
# Extract public key from private key into STDOUT\r
#\r
Process = subprocess.Popen('%s rsa -in %s -modulus -noout' % (OpenSslCommand, Item), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)\r
- PublicKeyHexString = Process.communicate()[0].split('=')[1].strip()\r
+ PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()\r
if Process.returncode != 0:\r
print('ERROR: Unable to extract public key from private key')\r
sys.exit(Process.returncode)\r
- PublicKey = ''\r
+ PublicKey = bytearray()\r
for Index in range (0, len(PublicKeyHexString), 2):\r
- PublicKey = PublicKey + chr(int(PublicKeyHexString[Index:Index + 2], 16))\r
+ PublicKey = PublicKey + PublicKeyHexString[Index:Index + 2]\r
\r
#\r
# Generate SHA 256 hash of RSA 2048 bit public key into STDOUT\r
#\r
PublicKeyHashC = '{'\r
for Item in PublicKeyHash:\r
- PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (ord(Item))\r
+ PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (Item)\r
PublicKeyHashC = PublicKeyHashC[:-2] + '}'\r
\r
#\r
# Write SHA 256 of 2048 bit binary public key to public key hash C structure file\r
#\r
try:\r
- args.PublicKeyHashCFile.write (PublicKeyHashC)\r
+ args.PublicKeyHashCFile.write (bytes(PublicKeyHashC, 'utf-8'))
args.PublicKeyHashCFile.close ()\r
except:\r
pass\r
if Process.returncode != 0:\r
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')\r
sys.exit(Process.returncode)\r
- print(Version[0])\r
+ print(Version[0].decode('utf-8'))\r
\r
#\r
# Read input file into a buffer and save input filename\r
# Extract public key from private key into STDOUT\r
#\r
Process = subprocess.Popen('%s rsa -in "%s" -modulus -noout' % (OpenSslCommand, args.PrivateKeyFileName), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)\r
- PublicKeyHexString = Process.communicate()[0].split('=')[1].strip()\r
+ PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()\r
+ PublicKeyHexString = PublicKeyHexString.decode('utf-8')\r
PublicKey = ''\r
while len(PublicKeyHexString) > 0:\r
PublicKey = PublicKey + PublicKeyHexString[0:2]\r
#\r
# Verify the public key\r
#\r
- if Header.PublicKey != PublicKey:\r
+ if Header.PublicKey != bytearray.fromhex(PublicKey):\r
print('ERROR: Public key in input file does not match public key from private key file')\r
sys.exit(1)\r
\r
\r
# save to file\r
try:\r
- f = open (Target, 'wb')\r
+ f = open (Target, 'w')\r
except:\r
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)\r
f.writelines(NewLines)\r
EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" %OutputFile, None)\r
\r
# Use a instance of BytesIO to cache data\r
- fStringIO = BytesIO('')\r
+ fStringIO = BytesIO()\r
\r
for Item in VfrUniOffsetList:\r
if (Item[0].find("Strings") != -1):\r
# GUID + Offset\r
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r
#\r
- UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]\r
- UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]\r
- fStringIO.write(''.join(UniGuid))\r
+ UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'\r
+ fStringIO.write(UniGuid)\r
UniValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (UniValue)\r
else:\r
# GUID + Offset\r
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r
#\r
- VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]\r
- VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]\r
- fStringIO.write(''.join(VfrGuid))\r
+ VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'\r
+ fStringIO.write(VfrGuid)\r
type (Item[1])\r
VfrValue = pack ('Q', int (Item[1], 16))\r
fStringIO.write (VfrValue)\r
CreateDirectory(os.path.dirname(Target))\r
\r
try:\r
- f = open (Source, 'rb')\r
+ f = open (Source, 'r')\r
except:\r
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)\r
# read whole file\r
return\r
\r
try:\r
- f = open (Target, 'wb')\r
+ f = open (Target, 'w')\r
except:\r
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)\r
f.write(NewLines)\r
# @param String: the source string\r
#\r
def StringArrayLength(String):\r
- if isinstance(String, unicode):\r
- return (len(String) + 1) * 2 + 1\r
- elif String.startswith('L"'):\r
+ if String.startswith('L"'):\r
return (len(String) - 3 + 1) * 2\r
elif String.startswith('"'):\r
return (len(String) - 2 + 1)\r
deme = ArrayIndex.findall(demesionattr)\r
for i in range(len(deme)-1):\r
if int(deme[i].lstrip("[").rstrip("]").strip()) > int(self._Capacity[i]):\r
- print "error"\r
+ print ("error")\r
if hasattr(self,"DefaultValues"):\r
for demesionattr in self.DefaultValues:\r
deme = ArrayIndex.findall(demesionattr)\r
for i in range(len(deme)-1):\r
if int(deme[i].lstrip("[").rstrip("]").strip()) > int(self._Capacity[i]):\r
- print "error"\r
+ print ("error")\r
return self._Capacity\r
@property\r
def DatumType(self):\r
continue\r
\r
if FileContent[0] == 0xff or FileContent[0] == 0xfe:\r
- FileContent = unicode(FileContent, "utf-16")\r
+ FileContent = FileContent.decode('utf-16')\r
+ IncludedFileList = gIncludePattern.findall(FileContent)
+ else:
+ IncludedFileList = []
+ try:
+ FileContent = str(FileContent)
+ IncludedFileList = gIncludePattern.findall(FileContent)
+ except:
+ pass
- IncludedFileList = gIncludePattern.findall(FileContent)
\r
for Inc in IncludedFileList:\r
FdfInfList = GlobalData.gFdfParser.Profile.InfList\r
FdfModuleList = [PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch=self._Arch) for Inf in FdfInfList]\r
AllModulePcds = set()\r
- ModuleSet = set(self._Modules.keys() + FdfModuleList)\r
+ ModuleSet = set(list(self._Modules.keys()) + FdfModuleList)\r
for ModuleFile in ModuleSet:\r
ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]\r
AllModulePcds = AllModulePcds | ModuleData.PcdsName\r
except:\r
EdkLogger.error('Build', COMMAND_FAILURE, 'Can not execute command: %s' % Command)\r
Result = Process.communicate()\r
- return Process.returncode, Result[0], Result[1]\r
+ return Process.returncode, Result[0].decode(encoding='utf-8', errors='ignore'), Result[1].decode(encoding='utf-8', errors='ignore')\r
\r
@staticmethod\r
def IntToCString(Value, ValueSize):\r
return\r
\r
if self._include_flag:\r
- self._ValueList[1] = "<HeaderFiles>_" + md5(self._CurrentLine).hexdigest()\r
+ self._ValueList[1] = "<HeaderFiles>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()\r
self._ValueList[2] = self._CurrentLine\r
if self._package_flag and "}" != self._CurrentLine:\r
- self._ValueList[1] = "<Packages>_" + md5(self._CurrentLine).hexdigest()\r
+ self._ValueList[1] = "<Packages>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()\r
self._ValueList[2] = self._CurrentLine\r
if self._CurrentLine == "}":\r
self._package_flag = False\r
def FileWrite(File, String, Wrapper=False):\r
if Wrapper:\r
String = textwrap.fill(String, 120)\r
- File.write(String + gEndOfLine)\r
+ File.append(String + gEndOfLine)\r
\r
def ByteArrayForamt(Value):\r
IsByteArray = False\r
\r
Match = gTimeStampPattern.search(FileContents)\r
if Match:\r
- self.BuildTimeStamp = datetime.fromtimestamp(int(Match.group(1)))\r
+ self.BuildTimeStamp = datetime.utcfromtimestamp(int(Match.group(1)))\r
except IOError:\r
EdkLogger.warn(None, "Fail to read report file", FwReportFileName)\r
\r
# read one line a time\r
Line = From.readline()\r
# empty string means "end"\r
- if Line is not None and Line != "":\r
- To(Line.rstrip())\r
+ if Line is not None and Line != b"":\r
+ To(Line.rstrip().decode(encoding='utf-8', errors='ignore'))\r
else:\r
break\r
if ExitFlag.isSet():\r
def GenerateReport(self, BuildDuration, AutoGenTime, MakeTime, GenFdsTime):\r
if self.ReportFile:\r
try:\r
- File = BytesIO('')\r
+ File = []\r
for (Wa, MaList) in self.ReportList:\r
PlatformReport(Wa, MaList, self.ReportType).GenerateReport(File, BuildDuration, AutoGenTime, MakeTime, GenFdsTime, self.ReportType)\r
- Content = FileLinesSplit(File.getvalue(), gLineMaxLength)\r
- SaveFileOnChange(self.ReportFile, Content, True)\r
+ Content = FileLinesSplit(''.join(File), gLineMaxLength)\r
+ SaveFileOnChange(self.ReportFile, Content, False)\r
EdkLogger.quiet("Build report can be found at %s" % os.path.abspath(self.ReportFile))\r
except IOError:\r
EdkLogger.error(None, FILE_WRITE_FAILURE, ExtraData=self.ReportFile)\r
except:\r
EdkLogger.error("BuildReport", CODE_ERROR, "Unknown fatal error when generating build report", ExtraData=self.ReportFile, RaiseError=False)\r
EdkLogger.quiet("(Python %s on %s\n%s)" % (platform.python_version(), sys.platform, traceback.format_exc()))\r
- File.close()\r
\r
# This acts like the main() function for the script, unless it is 'import'ed into another script.\r
if __name__ == '__main__':\r
from __future__ import print_function\r
import Common.LongFilePathOs as os\r
import re\r
-from io import BytesIO\r
import sys\r
import glob\r
import time\r
# read one line a time\r
Line = From.readline()\r
# empty string means "end"\r
- if Line is not None and Line != "":\r
- To(Line.rstrip())\r
+ if Line is not None and Line != b"":\r
+ To(Line.rstrip().decode(encoding='utf-8', errors='ignore'))\r
else:\r
break\r
if ExitFlag.isSet():\r
# Add general information.\r
#\r
if ModeIsSmm:\r
- MapBuffer.write('\n\n%s (Fixed SMRAM Offset, BaseAddress=0x%010X, EntryPoint=0x%010X)\n' % (ModuleName, BaseAddress, BaseAddress + ModuleInfo.Image.EntryPoint))\r
+ MapBuffer.append('\n\n%s (Fixed SMRAM Offset, BaseAddress=0x%010X, EntryPoint=0x%010X)\n' % (ModuleName, BaseAddress, BaseAddress + ModuleInfo.Image.EntryPoint))\r
elif AddrIsOffset:\r
- MapBuffer.write('\n\n%s (Fixed Memory Offset, BaseAddress=-0x%010X, EntryPoint=-0x%010X)\n' % (ModuleName, 0 - BaseAddress, 0 - (BaseAddress + ModuleInfo.Image.EntryPoint)))\r
+ MapBuffer.append('\n\n%s (Fixed Memory Offset, BaseAddress=-0x%010X, EntryPoint=-0x%010X)\n' % (ModuleName, 0 - BaseAddress, 0 - (BaseAddress + ModuleInfo.Image.EntryPoint)))\r
else:\r
- MapBuffer.write('\n\n%s (Fixed Memory Address, BaseAddress=0x%010X, EntryPoint=0x%010X)\n' % (ModuleName, BaseAddress, BaseAddress + ModuleInfo.Image.EntryPoint))\r
+ MapBuffer.append('\n\n%s (Fixed Memory Address, BaseAddress=0x%010X, EntryPoint=0x%010X)\n' % (ModuleName, BaseAddress, BaseAddress + ModuleInfo.Image.EntryPoint))\r
#\r
# Add guid and general seciton section.\r
#\r
elif SectionHeader[0] in ['.data', '.sdata']:\r
DataSectionAddress = SectionHeader[1]\r
if AddrIsOffset:\r
- MapBuffer.write('(GUID=%s, .textbaseaddress=-0x%010X, .databaseaddress=-0x%010X)\n' % (ModuleInfo.Guid, 0 - (BaseAddress + TextSectionAddress), 0 - (BaseAddress + DataSectionAddress)))\r
+ MapBuffer.append('(GUID=%s, .textbaseaddress=-0x%010X, .databaseaddress=-0x%010X)\n' % (ModuleInfo.Guid, 0 - (BaseAddress + TextSectionAddress), 0 - (BaseAddress + DataSectionAddress)))\r
else:\r
- MapBuffer.write('(GUID=%s, .textbaseaddress=0x%010X, .databaseaddress=0x%010X)\n' % (ModuleInfo.Guid, BaseAddress + TextSectionAddress, BaseAddress + DataSectionAddress))\r
+ MapBuffer.append('(GUID=%s, .textbaseaddress=0x%010X, .databaseaddress=0x%010X)\n' % (ModuleInfo.Guid, BaseAddress + TextSectionAddress, BaseAddress + DataSectionAddress))\r
#\r
# Add debug image full path.\r
#\r
- MapBuffer.write('(IMAGE=%s)\n\n' % (ModuleDebugImage))\r
+ MapBuffer.append('(IMAGE=%s)\n\n' % (ModuleDebugImage))\r
#\r
# Add funtion address\r
#\r
for Function in FunctionList:\r
if AddrIsOffset:\r
- MapBuffer.write(' -0x%010X %s\n' % (0 - (BaseAddress + Function[1]), Function[0]))\r
+ MapBuffer.append(' -0x%010X %s\n' % (0 - (BaseAddress + Function[1]), Function[0]))\r
else:\r
- MapBuffer.write(' 0x%010X %s\n' % (BaseAddress + Function[1], Function[0]))\r
+ MapBuffer.append(' 0x%010X %s\n' % (BaseAddress + Function[1], Function[0]))\r
ImageMap.close()\r
\r
#\r
GuidString = MatchGuid.group()\r
if GuidString.upper() in ModuleList:\r
Line = Line.replace(GuidString, ModuleList[GuidString.upper()].Name)\r
- MapBuffer.write(Line)\r
+ MapBuffer.append(Line)\r
#\r
# Add the debug image full path.\r
#\r
if MatchGuid is not None:\r
GuidString = MatchGuid.group().split("=")[1]\r
if GuidString.upper() in ModuleList:\r
- MapBuffer.write('(IMAGE=%s)\n' % (os.path.join(ModuleList[GuidString.upper()].DebugDir, ModuleList[GuidString.upper()].Name + '.efi')))\r
+ MapBuffer.append('(IMAGE=%s)\n' % (os.path.join(ModuleList[GuidString.upper()].DebugDir, ModuleList[GuidString.upper()].Name + '.efi')))\r
\r
FvMap.close()\r
\r
if ReturnValue != 0:\r
EdkLogger.error("build", PARAMETER_INVALID, "Patch PCD value failed", ExtraData=ErrorInfo)\r
\r
- MapBuffer.write('PEI_CODE_PAGE_NUMBER = 0x%x\n' % (PeiSize // 0x1000))\r
- MapBuffer.write('BOOT_CODE_PAGE_NUMBER = 0x%x\n' % (BtSize // 0x1000))\r
- MapBuffer.write('RUNTIME_CODE_PAGE_NUMBER = 0x%x\n' % (RtSize // 0x1000))\r
+ MapBuffer.append('PEI_CODE_PAGE_NUMBER = 0x%x\n' % (PeiSize // 0x1000))\r
+ MapBuffer.append('BOOT_CODE_PAGE_NUMBER = 0x%x\n' % (BtSize // 0x1000))\r
+ MapBuffer.append('RUNTIME_CODE_PAGE_NUMBER = 0x%x\n' % (RtSize // 0x1000))\r
if len (SmmModuleList) > 0:\r
- MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize // 0x1000))\r
+ MapBuffer.append('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize // 0x1000))\r
\r
PeiBaseAddr = TopMemoryAddress - RtSize - BtSize\r
BtBaseAddr = TopMemoryAddress - RtSize\r
self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0)\r
self._RebaseModule (MapBuffer, RtBaseAddr, RtModuleList, TopMemoryAddress == 0)\r
self._RebaseModule (MapBuffer, 0x1000, SmmModuleList, AddrIsOffset=False, ModeIsSmm=True)\r
- MapBuffer.write('\n\n')\r
+ MapBuffer.append('\n\n')\r
sys.stdout.write ("\n")\r
sys.stdout.flush()\r
\r
#\r
# Save address map into MAP file.\r
#\r
- SaveFileOnChange(MapFilePath, MapBuffer.getvalue(), False)\r
- MapBuffer.close()\r
+ SaveFileOnChange(MapFilePath, ''.join(MapBuffer), False)\r
if self.LoadFixAddress != 0:\r
sys.stdout.write ("\nLoad Module At Fix Address Map file can be found at %s\n" % (MapFilePath))\r
sys.stdout.flush()\r
if not Ma.IsLibrary:\r
ModuleList[Ma.Guid.upper()] = Ma\r
\r
- MapBuffer = BytesIO('')\r
+ MapBuffer = []\r
if self.LoadFixAddress != 0:\r
#\r
# Rebase module to the preferred memory address before GenFds\r
if not Ma.IsLibrary:\r
ModuleList[Ma.Guid.upper()] = Ma\r
\r
- MapBuffer = BytesIO('')\r
+ MapBuffer = []\r
if self.LoadFixAddress != 0:\r
#\r
# Rebase module to the preferred memory address before GenFds\r
#\r
# Rebase module to the preferred memory address before GenFds\r
#\r
- MapBuffer = BytesIO('')\r
+ MapBuffer = []\r
if self.LoadFixAddress != 0:\r
self._CollectModuleMapBuffer(MapBuffer, ModuleList)\r
\r