BaseTools: Handle the bytes and str difference

Handle bytes and str separately, since they are different types in Python 3, and remove the unicode() helpers.
Use utcfromtimestamp() instead of fromtimestamp().

Cc: Liming Gao <liming.gao@intel.com>
Cc: Yonghong Zhu <yonghong.zhu@intel.com>
Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Yunhua Feng <yunhuax.feng@intel.com>
Reviewed-by: Liming Gao <liming.gao@intel.com>
Author: Yunhua Feng
Date: 2018-10-11 11:20:59 +08:00
Committed by: Yonghong Zhu
Parent: a09f4c91f7
Commit: 86e6cf98a8
33 changed files with 131 additions and 162 deletions
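
The per-file hunks below all apply the same handful of Python 3 conventions. As a quick orientation, here is a minimal, hypothetical sketch (not taken from the patch itself) of those conventions; the variable names and values are illustrative only.

    import hashlib
    from datetime import datetime
    from io import BytesIO, StringIO
    from struct import pack

    # Hash file content as bytes: open in 'rb' so read() returns bytes,
    # and encode any str before passing it to md5().update().
    m = hashlib.md5()
    m.update("PlatformHashString".encode('utf-8'))   # str must be encoded first
    with open(__file__, 'rb') as f:                   # binary mode yields bytes
        m.update(f.read())

    # Accumulate binary data in a bytearray; concatenating str and bytes no
    # longer works, and a single byte must be wrapped with bytes([...]) instead of chr().
    buf = bytearray()
    buf += pack('=L', 20)
    buf += bytes([0xFF])

    # BytesIO holds bytes and StringIO holds text; BytesIO('') raises TypeError in Python 3.
    binary_stream = BytesIO()
    binary_stream.write(bytes([0xE0, 0xC5, 0x13, 0x89]))
    text_stream = StringIO()
    text_stream.write("[options]\n")

    # Timestamps are taken with utcfromtimestamp() instead of fromtimestamp().
    build_time = datetime.utcfromtimestamp(0)
    print(m.hexdigest(), len(buf), build_time)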


@@ -661,7 +661,7 @@ class WorkspaceAutoGen(AutoGen):
 for files in AllWorkSpaceMetaFiles:
 if files.endswith('.dec'):
 continue
-f = open(files, 'r')
+f = open(files, 'rb')
 Content = f.read()
 f.close()
 m.update(Content)
@@ -690,7 +690,7 @@ class WorkspaceAutoGen(AutoGen):
 HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')
 m = hashlib.md5()
 # Get .dec file's hash value
-f = open(Pkg.MetaFile.Path, 'r')
+f = open(Pkg.MetaFile.Path, 'rb')
 Content = f.read()
 f.close()
 m.update(Content)
@@ -700,7 +700,7 @@ class WorkspaceAutoGen(AutoGen):
 for Root, Dirs, Files in os.walk(str(inc)):
 for File in sorted(Files):
 File_Path = os.path.join(Root, File)
-f = open(File_Path, 'r')
+f = open(File_Path, 'rb')
 Content = f.read()
 f.close()
 m.update(Content)
@@ -1602,7 +1602,7 @@ class PlatformAutoGen(AutoGen):
 for pcd in self._DynamicPcdList:
 if len(pcd.SkuInfoList) == 1:
 for (SkuName, SkuId) in allskuset:
-if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0:
+if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:
 continue
 pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])
 pcd.SkuInfoList[SkuName].SkuId = SkuId
@@ -3207,7 +3207,7 @@ class ModuleAutoGen(AutoGen):
 AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
 RetVal[AutoFile] = str(StringH)
 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
-if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":
+if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
 AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
 RetVal[AutoFile] = UniStringBinBuffer.getvalue()
 AutoFile.IsBinary = True
@@ -3218,7 +3218,7 @@ class ModuleAutoGen(AutoGen):
 AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
 RetVal[AutoFile] = str(StringIdf)
 self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
-if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":
+if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
 AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
 RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
 AutoFile.IsBinary = True
@@ -3449,7 +3449,7 @@ class ModuleAutoGen(AutoGen):
 EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
 # Use a instance of BytesIO to cache data
-fStringIO = BytesIO('')
+fStringIO = BytesIO()
 for Item in VfrUniOffsetList:
 if (Item[0].find("Strings") != -1):
@@ -3459,8 +3459,7 @@ class ModuleAutoGen(AutoGen):
 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
 #
 UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
-UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
-fStringIO.write(''.join(UniGuid))
+fStringIO.write(bytes(UniGuid))
 UniValue = pack ('Q', int (Item[1], 16))
 fStringIO.write (UniValue)
 else:
@@ -3470,8 +3469,7 @@ class ModuleAutoGen(AutoGen):
 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
 #
 VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
-VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
-fStringIO.write(''.join(VfrGuid))
+fStringIO.write(bytes(VfrGuid))
 VfrValue = pack ('Q', int (Item[1], 16))
 fStringIO.write (VfrValue)
 #
@@ -4019,29 +4017,29 @@ class ModuleAutoGen(AutoGen):
 GlobalData.gModuleHash[self.Arch] = {}
 m = hashlib.md5()
 # Add Platform level hash
-m.update(GlobalData.gPlatformHash)
+m.update(GlobalData.gPlatformHash.encode('utf-8'))
 # Add Package level hash
 if self.DependentPackageList:
 for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
 if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:
-m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName])
+m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName].encode('utf-8'))
 # Add Library hash
 if self.LibraryAutoGenList:
 for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
 if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
 Lib.GenModuleHash()
-m.update(GlobalData.gModuleHash[self.Arch][Lib.Name])
+m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
 # Add Module self
-f = open(str(self.MetaFile), 'r')
+f = open(str(self.MetaFile), 'rb')
 Content = f.read()
 f.close()
 m.update(Content)
 # Add Module's source files
 if self.SourceFileList:
 for File in sorted(self.SourceFileList, key=lambda x: str(x)):
-f = open(str(File), 'r')
+f = open(str(File), 'rb')
 Content = f.read()
 f.close()
 m.update(Content)


@@ -1795,7 +1795,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
 TempBuffer += Buffer
 elif File.Ext.upper() == '.JPG':
 ImageType, = struct.unpack('4s', Buffer[6:10])
-if ImageType != 'JFIF':
+if ImageType != b'JFIF':
 EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)
 TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)
 TempBuffer += pack('I', len(Buffer))
@@ -1895,7 +1895,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
 def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
 ImageType, = struct.unpack('2s', Buffer[0:2])
-if ImageType!= 'BM': # BMP file type is 'BM'
+if ImageType!= b'BM': # BMP file type is 'BM'
 EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)
 BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])
 BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')
@@ -1967,7 +1967,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
 for Index in range(0, len(PaletteBuffer)):
 if Index % 4 == 3:
 continue
-PaletteTemp += PaletteBuffer[Index]
+PaletteTemp += bytes([PaletteBuffer[Index]])
 PaletteBuffer = PaletteTemp[1:]
 return ImageBuffer, PaletteBuffer


@@ -291,7 +291,7 @@ class DbItemList:
 PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
-Buffer = ''
+Buffer = bytearray()
 for Datas in self.RawDataList:
 if type(Datas) in (list, tuple):
 for Data in Datas:
@@ -316,7 +316,7 @@ class DbExMapTblItemList (DbItemList):
 DbItemList.__init__(self, ItemSize, DataList, RawDataList)
 def PackData(self):
-Buffer = ''
+Buffer = bytearray()
 PackStr = "=LHH"
 for Datas in self.RawDataList:
 Buffer += pack(PackStr,
@@ -365,7 +365,7 @@ class DbComItemList (DbItemList):
 def PackData(self):
 PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
-Buffer = ''
+Buffer = bytearray()
 for DataList in self.RawDataList:
 for Data in DataList:
 if type(Data) in (list, tuple):
@@ -386,7 +386,7 @@ class DbVariableTableItemList (DbComItemList):
 def PackData(self):
 PackStr = "=LLHHLHH"
-Buffer = ''
+Buffer = bytearray()
 for DataList in self.RawDataList:
 for Data in DataList:
 Buffer += pack(PackStr,
@@ -447,7 +447,7 @@ class DbSkuHeadTableItemList (DbItemList):
 def PackData(self):
 PackStr = "=LL"
-Buffer = ''
+Buffer = bytearray()
 for Data in self.RawDataList:
 Buffer += pack(PackStr,
 GetIntegerValue(Data[0]),
@@ -469,7 +469,7 @@ class DbSizeTableItemList (DbItemList):
 return length * self.ItemSize
 def PackData(self):
 PackStr = "=H"
-Buffer = ''
+Buffer = bytearray()
 for Data in self.RawDataList:
 Buffer += pack(PackStr,
 GetIntegerValue(Data[0]))
@@ -849,7 +849,7 @@ def BuildExDataBase(Dict):
 Index = 0
 for Item in DbItemTotal:
 Index +=1
-b = Item.PackData()
+b = bytes(Item.PackData())
 Buffer += b
 if Index == InitTableNum:
 if len(Buffer) % 8:
@@ -917,9 +917,9 @@ def CreatePcdDataBase(PcdDBData):
 totallenbuff = pack("=L", totallen)
 newbuffer = databasebuff[:32]
 for i in range(4):
-newbuffer += totallenbuff[i]
+newbuffer += bytes([totallenbuff[i]])
 for i in range(36, totallen):
-newbuffer += databasebuff[i]
+newbuffer += bytes([databasebuff[i]])
 return newbuffer
@@ -962,7 +962,7 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
 AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)
 final_data = ()
 for item in PcdDbBuffer:
-final_data += unpack("B", item)
+final_data += unpack("B", bytes([item]))
 PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)
 PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)
 VarCheckTableData[(skuname, skuid)] = VarCheckTab
@@ -975,7 +975,7 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
 AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)
 final_data = ()
 for item in PcdDbBuffer:
-final_data += unpack("B", item)
+final_data += unpack("B", bytes([item]))
 PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)
 return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)


@@ -66,7 +66,7 @@ class VariableMgr(object):
 data = value_list[0]
 value_list = []
 for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):
-value_list.append(hex(unpack("B", data_byte)[0]))
+value_list.append(hex(unpack("B", bytes([data_byte]))[0]))
 newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = value_list
 try:
 newvaluestr = "{" + ",".join(VariableMgr.assemble_variable(newvalue)) +"}"
@@ -87,7 +87,7 @@ class VariableMgr(object):
 data = value_list[0]
 value_list = []
 for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):
-value_list.append(hex(unpack("B", data_byte)[0]))
+value_list.append(hex(unpack("B", bytes([data_byte]))[0]))
 newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = (value_list,item.pcdname,item.PcdDscLine)
 for offset in newvalue:
 value_list,itemPcdname,itemPcdDscLine = newvalue[offset]
@@ -161,7 +161,7 @@ class VariableMgr(object):
 default_data_array = ()
 for item in default_data_buffer:
-default_data_array += unpack("B", item)
+default_data_array += unpack("B", bytes([item]))
 var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])
@@ -179,7 +179,7 @@ class VariableMgr(object):
 others_data_array = ()
 for item in others_data_buffer:
-others_data_array += unpack("B", item)
+others_data_array += unpack("B", bytes([item]))
 data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)
@@ -195,7 +195,7 @@ class VariableMgr(object):
 return []
 pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})
-NvStoreDataBuffer = ""
+NvStoreDataBuffer = bytearray()
 var_data_offset = collections.OrderedDict()
 offset = NvStorageHeaderSize
 for default_data, default_info in pcds_default_data.values():
@@ -222,7 +222,7 @@ class VariableMgr(object):
 nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8)
-data_delta_structure_buffer = ""
+data_delta_structure_buffer = bytearray()
 for skuname, defaultstore in var_data:
 if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
 continue
@@ -254,7 +254,7 @@ class VariableMgr(object):
 def unpack_data(data):
 final_data = ()
 for item in data:
-final_data += unpack("B", item)
+final_data += unpack("B", bytes([item]))
 return final_data
 @staticmethod
@@ -322,7 +322,7 @@ class VariableMgr(object):
 @staticmethod
 def PACK_VARIABLES_DATA(var_value,data_type, tail = None):
-Buffer = ""
+Buffer = bytearray()
 data_len = 0
 if data_type == DataType.TAB_VOID:
 for value_char in var_value.strip("{").strip("}").split(","):
@@ -352,7 +352,7 @@ class VariableMgr(object):
 @staticmethod
 def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):
-Buffer = ""
+Buffer = bytearray()
 Buffer += pack("=L", 4+8+8)
 Buffer += pack("=Q", int(skuid))
 Buffer += pack("=Q", int(defaultstoragename))
@@ -377,7 +377,7 @@ class VariableMgr(object):
 def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):
 skuid = self.GetSkuId(skuname)
 defaultstorageid = self.GetDefaultStoreId(defaultstoragename)
-Buffer = ""
+Buffer = bytearray()
 Buffer += pack("=L", 4+8+8)
 Buffer += pack("=Q", int(skuid))
 Buffer += pack("=Q", int(defaultstorageid))
@@ -400,7 +400,7 @@ class VariableMgr(object):
 @staticmethod
 def PACK_VARIABLE_NAME(var_name):
-Buffer = ""
+Buffer = bytearray()
 for name_char in var_name.strip("{").strip("}").split(","):
 Buffer += pack("=B", int(name_char, 16))


@@ -122,6 +122,8 @@ def DecToHexList(Dec, Digit = 8):
 # @retval: A list for formatted hex string
 #
 def AscToHexList(Ascii):
+if isinstance(Ascii, bytes):
+return ['0x{0:02X}'.format(Item) for Item in Ascii]
 return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]
 ## Create content of .h file


@@ -45,18 +45,6 @@ BACK_SLASH_PLACEHOLDER = u'\u0006'
 gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE)
-## Convert a python unicode string to a normal string
-#
-# Convert a python unicode string to a normal string
-# UniToStr(u'I am a string') is 'I am a string'
-#
-# @param Uni: The python unicode string
-#
-# @retval: The formatted normal string
-#
-def UniToStr(Uni):
-return repr(Uni)[2:-1]
 ## Convert a unicode string to a Hex list
 #
 # Convert a unicode string to a Hex list
@@ -438,7 +426,7 @@ class UniFileClassObject(object):
 if EndPos != -1 and EndPos - StartPos == 6 :
 if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE):
 EndStr = Line[EndPos: ]
-UniStr = ('\u' + (Line[StartPos + 2 : EndPos])).decode('unicode_escape')
+UniStr = Line[StartPos + 2: EndPos]
 if EndStr.startswith(u'\\x') and len(EndStr) >= 7:
 if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE):
 Line = Line[0 : StartPos] + UniStr + EndStr


@@ -41,7 +41,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
 os.mkdir(dest)
 BinFileName = "PcdVarCheck.bin"
 BinFilePath = os.path.join(dest, BinFileName)
-Buffer = ''
+Buffer = bytearray()
 index = 0
 for var_check_tab in self.var_check_info:
 index += 1


@@ -185,7 +185,7 @@ class PcdEntry:
 EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
 "PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
 try:
-self.PcdValue = pack('%ds' % Size, ValueString)
+self.PcdValue = pack('%ds' % Size, bytes(ValueString, 'utf-8'))
 except:
 EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
 "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
@@ -656,7 +656,7 @@ class GenVPD :
 EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)
 # Use a instance of BytesIO to cache data
-fStringIO = BytesIO('')
+fStringIO = BytesIO()
 # Write the header of map file.
 try :
@@ -674,8 +674,7 @@ class GenVPD :
 # Write Vpd binary file
 fStringIO.seek (eachPcd.PcdBinOffset)
 if isinstance(eachPcd.PcdValue, list):
-ValueList = [chr(Item) for Item in eachPcd.PcdValue]
-fStringIO.write(''.join(ValueList))
+fStringIO.write(bytes(eachPcd.PcdValue))
 else:
 fStringIO.write (eachPcd.PcdValue)


@@ -14,7 +14,6 @@
 import os
 from . import LongFilePathOsPath
 from Common.LongFilePathSupport import LongFilePath
-from Common.LongFilePathSupport import UniToStr
 import time
 path = LongFilePathOsPath
@@ -63,7 +62,7 @@ def listdir(path):
 List = []
 uList = os.listdir(u"%s" % LongFilePath(path))
 for Item in uList:
-List.append(UniToStr(Item))
+List.append(Item)
 return List
 environ = os.environ


@@ -1,7 +1,7 @@
 ## @file
 # Override built in function file.open to provide support for long file path
 #
-# Copyright (c) 2014 - 2015, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
 # This program and the accompanying materials
 # are licensed and made available under the terms and conditions of the BSD License
 # which accompanies this distribution. The full text of the license may be found at
@@ -49,15 +49,3 @@ def CopyLongFilePath(src, dst):
 with open(LongFilePath(src), 'rb') as fsrc:
 with open(LongFilePath(dst), 'wb') as fdst:
 shutil.copyfileobj(fsrc, fdst)
-## Convert a python unicode string to a normal string
-#
-# Convert a python unicode string to a normal string
-# UniToStr(u'I am a string') is 'I am a string'
-#
-# @param Uni: The python unicode string
-#
-# @retval: The formatted normal string
-#
-def UniToStr(Uni):
-return repr(Uni)[2:-1]


@@ -454,9 +454,6 @@ def RemoveDirectory(Directory, Recursively=False):
 # @retval False If the file content is the same
 #
 def SaveFileOnChange(File, Content, IsBinaryFile=True):
-if not IsBinaryFile:
-Content = Content.replace("\n", os.linesep)
 if os.path.exists(File):
 try:
 if isinstance(Content, bytes):
@@ -1308,7 +1305,7 @@ def ParseDevPathValue (Value):
 if err:
 raise BadExpression("DevicePath: %s" % str(err))
 Size = len(out.split())
-out = ','.join(out.split())
+out = ','.join(out.decode(encoding='utf-8', errors='ignore').split())
 return '{' + out + '}', Size
 def ParseFieldValue (Value):
@@ -1347,7 +1344,7 @@ def ParseFieldValue (Value):
 if Value[0] == '"' and Value[-1] == '"':
 Value = Value[1:-1]
 try:
-Value = "'" + uuid.UUID(Value).get_bytes_le() + "'"
+Value = "{" + ','.join([str(i) for i in uuid.UUID(Value).bytes_le]) + "}"
 except ValueError as Message:
 raise BadExpression(Message)
 Value, Size = ParseFieldValue(Value)
@@ -1871,7 +1868,7 @@ class PeImageClass():
 ByteArray = array.array('B')
 ByteArray.fromfile(PeObject, 4)
 # PE signature should be 'PE\0\0'
-if ByteArray.tostring() != 'PE\0\0':
+if ByteArray.tostring() != b'PE\0\0':
 self.ErrorInfo = self.FileName + ' has no valid PE signature PE00'
 return


@@ -815,11 +815,7 @@ def GetHelpTextList(HelpTextClassList):
 return List
 def StringToArray(String):
-if isinstance(String, unicode):
-if len(unicode) == 0:
-return "{0x00,0x00}"
-return "{%s,0x00,0x00}" % ",".join("0x%02x,0x00" % ord(C) for C in String)
-elif String.startswith('L"'):
+if String.startswith('L"'):
 if String == "L\"\"":
 return "{0x00,0x00}"
 else:
@@ -842,9 +838,7 @@ def StringToArray(String):
 return '{%s,0,0}' % ','.join(String.split())
 def StringArrayLength(String):
-if isinstance(String, unicode):
-return (len(String) + 1) * 2 + 1;
-elif String.startswith('L"'):
+if String.startswith('L"'):
 return (len(String) - 3 + 1) * 2
 elif String.startswith('"'):
 return (len(String) - 2 + 1)


@@ -91,18 +91,18 @@ class VpdInfoFile:
 if (Vpd is None):
 EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
-if not (Offset >= 0 or Offset == "*"):
+if not (Offset >= "0" or Offset == "*"):
 EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
 if Vpd.DatumType == TAB_VOID:
-if Vpd.MaxDatumSize <= 0:
+if Vpd.MaxDatumSize <= "0":
 EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
 "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
 elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
 if not Vpd.MaxDatumSize:
 Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType]
 else:
-if Vpd.MaxDatumSize <= 0:
+if Vpd.MaxDatumSize <= "0":
 EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
 "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
@@ -126,7 +126,7 @@ class VpdInfoFile:
 "Invalid parameter FilePath: %s." % FilePath)
 Content = FILE_COMMENT_TEMPLATE
-Pcds = sorted(self._VpdArray.keys())
+Pcds = sorted(self._VpdArray.keys(), key=lambda x: x.TokenCName)
 for Pcd in Pcds:
 i = 0
 PcdTokenCName = Pcd.TokenCName
@@ -248,7 +248,7 @@ def CallExtenalBPDGTool(ToolPath, VpdFileName):
 except Exception as X:
 EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData=str(X))
 (out, error) = PopenObject.communicate()
-print(out)
+print(out.decode(encoding='utf-8', errors='ignore'))
 while PopenObject.returncode is None :
 PopenObject.wait()


@@ -51,7 +51,7 @@ class AprioriSection (AprioriSectionClassObject):
 def GenFfs (self, FvName, Dict = {}, IsMakefile = False):
 DXE_GUID = "FC510EE7-FFDC-11D4-BD41-0080C73C8881"
 PEI_GUID = "1B45CC0A-156A-428A-AF62-49864DA0E6E6"
-Buffer = BytesIO('')
+Buffer = BytesIO()
 AprioriFileGuid = DXE_GUID
 if self.AprioriType == "PEI":
 AprioriFileGuid = PEI_GUID


@@ -21,6 +21,7 @@ from CommonDataClass.FdfClass import CapsuleClassObject
 import Common.LongFilePathOs as os
 import subprocess
 from io import BytesIO
+from io import StringIO
 from Common.Misc import SaveFileOnChange
 from Common.Misc import PackRegistryFormatGuid
 import uuid
@@ -184,7 +185,7 @@ class Capsule (CapsuleClassObject) :
 #
 # The real capsule header structure is 28 bytes
 #
-Header.write('\x00'*(HdrSize-28))
+Header.write(b'\x00'*(HdrSize-28))
 Header.write(FwMgrHdr.getvalue())
 Header.write(Content.getvalue())
 #
@@ -246,7 +247,7 @@ class Capsule (CapsuleClassObject) :
 def GenCapInf(self):
 self.CapInfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
 self.UiCapsuleName + "_Cap" + '.inf')
-CapInfFile = BytesIO() #open (self.CapInfFileName , 'w+')
+CapInfFile = StringIO() #open (self.CapInfFileName , 'w+')
 CapInfFile.writelines("[options]" + T_CHAR_LF)


@@ -82,7 +82,7 @@ class CapsuleFv (CapsuleData):
 if self.FvName.find('.fv') == -1:
 if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
 FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[self.FvName.upper()]
-FdBuffer = BytesIO('')
+FdBuffer = BytesIO()
 FvObj.CapsuleName = self.CapsuleName
 FvFile = FvObj.AddToBuffer(FdBuffer)
 FvObj.CapsuleName = None


@@ -74,7 +74,7 @@ class FD(FDClassObject):
 HasCapsuleRegion = True
 break
 if HasCapsuleRegion:
-TempFdBuffer = BytesIO('')
+TempFdBuffer = BytesIO()
 PreviousRegionStart = -1
 PreviousRegionSize = 1
@@ -103,7 +103,7 @@ class FD(FDClassObject):
 GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
 RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.vtfRawDict, self.DefineVarDict)
-FdBuffer = BytesIO('')
+FdBuffer = BytesIO()
 PreviousRegionStart = -1
 PreviousRegionSize = 1
 for RegionObj in self.RegionList :


@@ -82,7 +82,7 @@ class FileStatement (FileStatementClassObject) :
 Dict.update(self.DefineVarDict)
 SectionAlignments = None
 if self.FvName is not None :
-Buffer = BytesIO('')
+Buffer = BytesIO()
 if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
 EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName))
 Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper())
@@ -99,7 +99,7 @@ class FileStatement (FileStatementClassObject) :
 elif self.FileName is not None:
 if hasattr(self, 'FvFileType') and self.FvFileType == 'RAW':
 if isinstance(self.FileName, list) and isinstance(self.SubAlignment, list) and len(self.FileName) == len(self.SubAlignment):
-FileContent = ''
+FileContent = BytesIO()
 MaxAlignIndex = 0
 MaxAlignValue = 1
 for Index, File in enumerate(self.FileName):
@@ -115,15 +115,15 @@ class FileStatement (FileStatementClassObject) :
 if AlignValue > MaxAlignValue:
 MaxAlignIndex = Index
 MaxAlignValue = AlignValue
-FileContent += Content
-if len(FileContent) % AlignValue != 0:
+FileContent.write(Content)
+if len(FileContent.getvalue()) % AlignValue != 0:
 Size = AlignValue - len(FileContent) % AlignValue
 for i in range(0, Size):
-FileContent += pack('B', 0xFF)
-if FileContent:
+FileContent.write(pack('B', 0xFF))
+if FileContent.getvalue() != b'':
 OutputRAWFile = os.path.join(GenFdsGlobalVariable.FfsDir, self.NameGuid, self.NameGuid + '.raw')
-SaveFileOnChange(OutputRAWFile, FileContent, True)
+SaveFileOnChange(OutputRAWFile, FileContent.getvalue(), True)
 self.FileName = OutputRAWFile
 self.SubAlignment = self.SubAlignment[MaxAlignIndex]


@@ -1086,7 +1086,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
 def __GenUniVfrOffsetFile(VfrUniOffsetList, UniVfrOffsetFileName):
 # Use a instance of StringIO to cache data
-fStringIO = BytesIO('')
+fStringIO = BytesIO()
 for Item in VfrUniOffsetList:
 if (Item[0].find("Strings") != -1):
@@ -1096,8 +1096,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
 #
 UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
-UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
-fStringIO.write(''.join(UniGuid))
+fStringIO.write(bytes(UniGuid))
 UniValue = pack ('Q', int (Item[1], 16))
 fStringIO.write (UniValue)
 else:
@@ -1107,8 +1106,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
 #
 VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
-VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
-fStringIO.write(''.join(VfrGuid))
+fStringIO.write(bytes(VfrGuid))
 type (Item[1])
 VfrValue = pack ('Q', int (Item[1], 16))
 fStringIO.write (VfrValue)


@@ -18,6 +18,7 @@
 import Common.LongFilePathOs as os
 import subprocess
 from io import BytesIO
+from io import StringIO
 from struct import *
 from . import Ffs
@@ -204,7 +205,7 @@ class FV (FvClassObject):
 # PI FvHeader is 0x48 byte
 FvHeaderBuffer = FvFileObj.read(0x48)
 # FV alignment position.
-FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E]) & 0x1F)
+FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F)
 if FvAlignmentValue >= 0x400:
 if FvAlignmentValue >= 0x100000:
 if FvAlignmentValue >= 0x1000000:
@@ -264,7 +265,7 @@ class FV (FvClassObject):
 #
 self.InfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
 self.UiFvName + '.inf')
-self.FvInfFile = BytesIO()
+self.FvInfFile = StringIO()
 #
 # Add [Options]
@@ -339,7 +340,7 @@ class FV (FvClassObject):
 GenFdsGlobalVariable.ErrorLogger("FV Extension Header Entries declared for %s with no FvNameGuid declaration." % (self.UiFvName))
 else:
 TotalSize = 16 + 4
-Buffer = ''
+Buffer = bytearray()
 if self.UsedSizeEnable:
 TotalSize += (4 + 4)
 ## define EFI_FV_EXT_TYPE_USED_SIZE_TYPE 0x03
@@ -366,7 +367,7 @@ class FV (FvClassObject):
 #
 Buffer += (pack('HH', (FvUiLen + 16 + 4), 0x0002)
 + PackGUID(Guid)
-+ self.UiFvName)
++ bytes(self.UiFvName, 'utf-8'))
 for Index in range (0, len(self.FvExtEntryType)):
 if self.FvExtEntryType[Index] == 'FILE':


@@ -98,7 +98,7 @@ class FvImageSection(FvImageSectionClassObject):
 # Generate Fv
 #
 if self.FvName is not None:
-Buffer = BytesIO('')
+Buffer = BytesIO()
 Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName)
 if Fv is not None:
 self.Fv = Fv
@@ -118,7 +118,7 @@ class FvImageSection(FvImageSectionClassObject):
 # PI FvHeader is 0x48 byte
 FvHeaderBuffer = FvFileObj.read(0x48)
 # FV alignment position.
-FvAlignmentValue = 1 << (ord (FvHeaderBuffer[0x2E]) & 0x1F)
+FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F)
 # FvAlignmentValue is larger than or equal to 1K
 if FvAlignmentValue >= 0x400:
 if FvAlignmentValue >= 0x100000:


@@ -27,6 +27,7 @@ from Workspace.BuildClassObject import PcdClassObject
 from . import RuleComplexFile
 from .EfiSection import EfiSection
 from io import BytesIO
+from io import StringIO
 import Common.TargetTxtClassObject as TargetTxtClassObject
 import Common.ToolDefClassObject as ToolDefClassObject
 from Common.DataType import *
@@ -454,7 +455,7 @@ class GenFds :
 return
 elif GenFds.OnlyGenerateThisFv is None:
 for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict.values():
-Buffer = BytesIO('')
+Buffer = BytesIO()
 FvObj.AddToBuffer(Buffer)
 Buffer.close()
@@ -600,7 +601,7 @@ class GenFds :
 def GenerateGuidXRefFile(BuildDb, ArchList, FdfParserObj):
 GuidXRefFileName = os.path.join(GenFdsGlobalVariable.FvDir, "Guid.xref")
-GuidXRefFile = BytesIO('')
+GuidXRefFile = StringIO('')
 PkgGuidDict = {}
 GuidDict = {}
 ModuleList = []


@@ -720,8 +720,8 @@ class GenFdsGlobalVariable:
 return
 if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1:
 GenFdsGlobalVariable.InfLogger ("Return Value = %d" % PopenObject.returncode)
-GenFdsGlobalVariable.InfLogger (out)
-GenFdsGlobalVariable.InfLogger (error)
+GenFdsGlobalVariable.InfLogger (out.decode(encoding='utf-8',errors='ignore'))
+GenFdsGlobalVariable.InfLogger (error.decode(encoding='utf-8', errors='ignore'))
 if PopenObject.returncode != 0:
 print("###", cmd)
 EdkLogger.error("GenFds", COMMAND_FAILURE, errorMess)


@@ -57,8 +57,8 @@ class Region(RegionClassObject):
 PadByte = pack('B', 0xFF)
 else:
 PadByte = pack('B', 0)
-PadData = ''.join(PadByte for i in range(0, Size))
-Buffer.write(PadData)
+for i in range(0, Size):
+Buffer.write(PadByte)
 ## AddToBuffer()
 #
@@ -127,7 +127,7 @@ class Region(RegionClassObject):
 if self.FvAddress % FvAlignValue != 0:
 EdkLogger.error("GenFds", GENFDS_ERROR,
 "FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
-FvBuffer = BytesIO('')
+FvBuffer = BytesIO()
 FvBaseAddress = '0x%X' % self.FvAddress
 BlockSize = None
 BlockNum = None


@@ -120,7 +120,7 @@ if __name__ == '__main__':
 if Process.returncode != 0:
 print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
 sys.exit(Process.returncode)
-print(Version[0])
+print(Version[0].decode())
 #
 # Read input file into a buffer and save input filename


@@ -82,7 +82,7 @@ if __name__ == '__main__':
 if Process.returncode != 0:
 print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
 sys.exit(Process.returncode)
-print(Version[0])
+print(Version[0].decode())
 args.PemFileName = []
@@ -117,19 +117,19 @@ if __name__ == '__main__':
 args.PemFileName.append(Item.name)
 Item.close()
-PublicKeyHash = ''
+PublicKeyHash = bytearray()
 for Item in args.PemFileName:
 #
 # Extract public key from private key into STDOUT
 #
 Process = subprocess.Popen('%s rsa -in %s -modulus -noout' % (OpenSslCommand, Item), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
-PublicKeyHexString = Process.communicate()[0].split('=')[1].strip()
+PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()
 if Process.returncode != 0:
 print('ERROR: Unable to extract public key from private key')
 sys.exit(Process.returncode)
-PublicKey = ''
+PublicKey = bytearray()
 for Index in range (0, len(PublicKeyHexString), 2):
-PublicKey = PublicKey + chr(int(PublicKeyHexString[Index:Index + 2], 16))
+PublicKey = PublicKey + PublicKeyHexString[Index:Index + 2]
 #
 # Generate SHA 256 hash of RSA 2048 bit public key into STDOUT
@@ -155,14 +155,14 @@ if __name__ == '__main__':
 #
 PublicKeyHashC = '{'
 for Item in PublicKeyHash:
-PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (ord(Item))
+PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (Item)
 PublicKeyHashC = PublicKeyHashC[:-2] + '}'
 #
 # Write SHA 256 of 2048 bit binary public key to public key hash C structure file
 #
 try:
-args.PublicKeyHashCFile.write (PublicKeyHashC)
+args.PublicKeyHashCFile.write (bytes(PublicKeyHashC))
 args.PublicKeyHashCFile.close ()
 except:
 pass


@@ -103,7 +103,7 @@ if __name__ == '__main__':
 if Process.returncode != 0:
 print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
 sys.exit(Process.returncode)
-print(Version[0])
+print(Version[0].decode())
 #
 # Read input file into a buffer and save input filename
@@ -151,10 +151,11 @@ if __name__ == '__main__':
 # Extract public key from private key into STDOUT
 #
 Process = subprocess.Popen('%s rsa -in "%s" -modulus -noout' % (OpenSslCommand, args.PrivateKeyFileName), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
-PublicKeyHexString = Process.communicate()[0].split('=')[1].strip()
+PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()
+PublicKeyHexString = PublicKeyHexString.decode(encoding='utf-8')
 PublicKey = ''
 while len(PublicKeyHexString) > 0:
-PublicKey = PublicKey + chr(int(PublicKeyHexString[0:2], 16))
+PublicKey = PublicKey + PublicKeyHexString[0:2]
 PublicKeyHexString=PublicKeyHexString[2:]
 if Process.returncode != 0:
 sys.exit(Process.returncode)
@@ -186,7 +187,7 @@ if __name__ == '__main__':
 #
 args.OutputFile = open(args.OutputFileName, 'wb')
 args.OutputFile.write(EFI_HASH_ALGORITHM_SHA256_GUID.get_bytes_le())
-args.OutputFile.write(PublicKey)
+args.OutputFile.write(bytearray.fromhex(PublicKey))
 args.OutputFile.write(Signature)
 args.OutputFile.write(args.InputFileBuffer)
 args.OutputFile.close()
@@ -208,7 +209,7 @@ if __name__ == '__main__':
 #
 # Verify the public key
 #
-if Header.PublicKey != PublicKey:
+if Header.PublicKey != bytearray.fromhex(PublicKey):
 print('ERROR: Public key in input file does not match public key from private key file')
 sys.exit(1)


@@ -458,7 +458,7 @@ def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
 EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" %OutputFile, None)
 # Use a instance of BytesIO to cache data
-fStringIO = BytesIO('')
+fStringIO = BytesIO()
 for Item in VfrUniOffsetList:
 if (Item[0].find("Strings") != -1):
@@ -468,8 +468,7 @@ def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
 # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
 #
 UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
-UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
-fStringIO.write(''.join(UniGuid))
+fStringIO.write(bytes(UniGuid))
 UniValue = pack ('Q', int (Item[1], 16))
 fStringIO.write (UniValue)
 else:
@@ -479,9 +478,7 @@ def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
 # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
 #
 VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
-VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
-fStringIO.write(''.join(VfrGuid))
-type (Item[1])
+fStringIO.write(bytes(VfrGuid))
 VfrValue = pack ('Q', int (Item[1], 16))
 fStringIO.write (VfrValue)


@@ -680,9 +680,7 @@ def GetHelpTextList(HelpTextClassList):
 # @param String: the source string
 #
 def StringArrayLength(String):
-if isinstance(String, unicode):
-return (len(String) + 1) * 2 + 1
-elif String.startswith('L"'):
+if String.startswith('L"'):
 return (len(String) - 3 + 1) * 2
 elif String.startswith('"'):
 return (len(String) - 2 + 1)


@@ -130,7 +130,14 @@ def GetDependencyList(FileStack, SearchPathList):
 continue
 if FileContent[0] == 0xff or FileContent[0] == 0xfe:
-FileContent = unicode(FileContent, "utf-16")
+FileContent = str(FileContent, "utf-16")
+IncludedFileList = gIncludePattern.findall(FileContent)
+else:
+try:
+FileContent = str(FileContent, "utf-8")
+IncludedFileList = gIncludePattern.findall(FileContent)
+except:
+pass
 IncludedFileList = gIncludePattern.findall(FileContent)
 for Inc in IncludedFileList:
@@ -1655,7 +1662,7 @@ class DscBuildData(PlatformBuildClassObject):
 except:
 EdkLogger.error('Build', COMMAND_FAILURE, 'Can not execute command: %s' % Command)
 Result = Process.communicate()
-return Process.returncode, Result[0], Result[1]
+return Process.returncode, Result[0].decode(encoding='utf-8', errors='ignore'), Result[1].decode(encoding='utf-8', errors='ignore')
 @staticmethod
 def IntToCString(Value, ValueSize):
@@ -2684,7 +2691,7 @@ class DscBuildData(PlatformBuildClassObject):
 Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName] = {}
 Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName][DefaultStore] = DefaultValue
 for pcd in Pcds.values():
-SkuInfoObj = pcd.SkuInfoList.values()[0]
+SkuInfoObj = list(pcd.SkuInfoList.values())[0]
 pcdDecObject = self._DecPcds[pcd.TokenCName, pcd.TokenSpaceGuidCName]
 pcd.DatumType = pcdDecObject.DatumType
 # Only fix the value while no value provided in DSC file.


@@ -1931,10 +1931,10 @@ class DecParser(MetaFileParser):
 return
 if self._include_flag:
-self._ValueList[1] = "<HeaderFiles>_" + md5(self._CurrentLine).hexdigest()
+self._ValueList[1] = "<HeaderFiles>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()
 self._ValueList[2] = self._CurrentLine
 if self._package_flag and "}" != self._CurrentLine:
-self._ValueList[1] = "<Packages>_" + md5(self._CurrentLine).hexdigest()
+self._ValueList[1] = "<Packages>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()
 self._ValueList[2] = self._CurrentLine
 if self._CurrentLine == "}":
 self._package_flag = False


@@ -28,7 +28,7 @@ import hashlib
 import subprocess
 import threading
 from datetime import datetime
-from io import BytesIO
+from io import StringIO
 from Common import EdkLogger
 from Common.Misc import SaveFileOnChange
 from Common.Misc import GuidStructureByteArrayToGuidString
@@ -641,7 +641,7 @@ class ModuleReport(object):
 Match = gTimeStampPattern.search(FileContents)
 if Match:
-self.BuildTimeStamp = datetime.fromtimestamp(int(Match.group(1)))
+self.BuildTimeStamp = datetime.utcfromtimestamp(int(Match.group(1)))
 except IOError:
 EdkLogger.warn(None, "Fail to read report file", FwReportFileName)
@@ -726,8 +726,8 @@ def ReadMessage(From, To, ExitFlag):
 # read one line a time
 Line = From.readline()
 # empty string means "end"
-if Line is not None and Line != "":
-To(Line.rstrip())
+if Line is not None and Line != b"":
+To(Line.rstrip().decode(encoding='utf-8', errors='ignore'))
 else:
 break
 if ExitFlag.isSet():
@@ -2246,7 +2246,7 @@ class BuildReport(object):
 def GenerateReport(self, BuildDuration, AutoGenTime, MakeTime, GenFdsTime):
 if self.ReportFile:
 try:
-File = BytesIO('')
+File = StringIO('')
 for (Wa, MaList) in self.ReportList:
 PlatformReport(Wa, MaList, self.ReportType).GenerateReport(File, BuildDuration, AutoGenTime, MakeTime, GenFdsTime, self.ReportType)
 Content = FileLinesSplit(File.getvalue(), gLineMaxLength)


@@ -18,7 +18,7 @@
 #
 import Common.LongFilePathOs as os
 import re
-from io import BytesIO
+from io import StringIO
 import sys
 import glob
 import time
@@ -242,8 +242,8 @@ def ReadMessage(From, To, ExitFlag):
 # read one line a time
 Line = From.readline()
 # empty string means "end"
-if Line is not None and Line != "":
-To(Line.rstrip())
+if Line is not None and Line != b"":
+To(Line.rstrip().decode(encoding='utf-8', errors='ignore'))
 else:
 break
 if ExitFlag.isSet():
@@ -1780,7 +1780,7 @@ class Build():
 if not Ma.IsLibrary:
 ModuleList[Ma.Guid.upper()] = Ma
-MapBuffer = BytesIO('')
+MapBuffer = StringIO('')
 if self.LoadFixAddress != 0:
 #
 # Rebase module to the preferred memory address before GenFds
@@ -1938,7 +1938,7 @@ class Build():
 if not Ma.IsLibrary:
 ModuleList[Ma.Guid.upper()] = Ma
-MapBuffer = BytesIO('')
+MapBuffer = StringIO('')
 if self.LoadFixAddress != 0:
 #
 # Rebase module to the preferred memory address before GenFds
@@ -2125,7 +2125,7 @@ class Build():
 #
 # Rebase module to the preferred memory address before GenFds
 #
-MapBuffer = BytesIO('')
+MapBuffer = StringIO('')
 if self.LoadFixAddress != 0:
 self._CollectModuleMapBuffer(MapBuffer, ModuleList)