BaseTools: Handle the bytes and str difference

Handle the difference between bytes and str, remove uses of the
unicode() builtin, and correct the mode parameter passed to open().
Use utcfromtimestamp() instead of fromtimestamp().

Cc: Bob Feng <bob.c.feng@intel.com>
Cc: Liming Gao <liming.gao@intel.com>
Cc: Yonghong Zhu <yonghong.zhu@intel.com>
Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Zhiju.Fan <zhijux.fan@intel.com>
Tested-by: Laszlo Ersek <lersek@redhat.com>
Tested-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Reviewed-by: Liming Gao <liming.gao@intel.com>
Reviewed-by: Bob Feng <bob.c.feng@intel.com>
Feng, Bob C
2019-01-23 10:16:00 +08:00
parent f8d11e5a4a
commit d943b0c339
37 changed files with 247 additions and 244 deletions
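
Note: all of the hunks below follow from the same Python 3 rule: binary data is bytes, text is str, and the two no longer mix implicitly. As a minimal illustration (not taken from the tree) of the hashing pattern the patch converts to:

    import hashlib

    m = hashlib.md5()
    # A file opened in 'rb' mode yields bytes, which md5.update() accepts directly.
    with open(__file__, 'rb') as f:
        m.update(f.read())
    # A str value (such as a stored hex digest) must be encoded before hashing.
    m.update('d41d8cd98f00b204e9800998ecf8427e'.encode('utf-8'))
    print(m.hexdigest())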


@ -726,11 +726,11 @@ class WorkspaceAutoGen(AutoGen):
for files in AllWorkSpaceMetaFiles:
if files.endswith('.dec'):
continue
f = open(files, 'r')
f = open(files, 'rb')
Content = f.read()
f.close()
m.update(Content)
SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), True)
SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), False)
GlobalData.gPlatformHash = m.hexdigest()
#
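
Note: the SaveFileOnChange() calls change their third argument from True to False because the hexdigest being written is a str, not bytes; under Python 3 a binary write would require bytes. A standalone sketch of the distinction, using a hypothetical helper rather than the BaseTools implementation:

    def save_text_or_binary(path, content, is_binary):
        # Text content must go through 'w'; bytes content must go through 'wb'.
        with open(path, 'wb' if is_binary else 'w') as fd:
            fd.write(content)

    save_text_or_binary('AutoGen.hash.example', 'd41d8cd98f00b204e9800998ecf8427e', False)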
@ -755,7 +755,7 @@ class WorkspaceAutoGen(AutoGen):
HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')
m = hashlib.md5()
# Get .dec file's hash value
f = open(Pkg.MetaFile.Path, 'r')
f = open(Pkg.MetaFile.Path, 'rb')
Content = f.read()
f.close()
m.update(Content)
@ -765,11 +765,11 @@ class WorkspaceAutoGen(AutoGen):
for Root, Dirs, Files in os.walk(str(inc)):
for File in sorted(Files):
File_Path = os.path.join(Root, File)
f = open(File_Path, 'r')
f = open(File_Path, 'rb')
Content = f.read()
f.close()
m.update(Content)
SaveFileOnChange(HashFile, m.hexdigest(), True)
SaveFileOnChange(HashFile, m.hexdigest(), False)
GlobalData.gPackageHash[Pkg.Arch][Pkg.PackageName] = m.hexdigest()
def _GetMetaFiles(self, Target, Toolchain, Arch):
@ -1736,7 +1736,7 @@ class PlatformAutoGen(AutoGen):
for pcd in self._DynamicPcdList:
if len(pcd.SkuInfoList) == 1:
for (SkuName, SkuId) in allskuset:
if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0:
if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:
continue
pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])
pcd.SkuInfoList[SkuName].SkuId = SkuId
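
Note: Python 3 has no separate unicode type, so the old type(SkuId) in (str, unicode) test becomes an isinstance() check against str alone. A minimal sketch of the equivalent test:

    def is_default_sku_id(sku_id):
        # Every text value is str in Python 3; eval() turns "0" or "0x0" into an int.
        if isinstance(sku_id, str):
            return eval(sku_id) == 0
        return sku_id == 0

    print(is_default_sku_id("0x0"), is_default_sku_id(1))   # True False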
@ -1906,7 +1906,7 @@ class PlatformAutoGen(AutoGen):
ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)
ToolsDef += "\n"
SaveFileOnChange(self.ToolDefinitionFile, ToolsDef)
SaveFileOnChange(self.ToolDefinitionFile, ToolsDef, False)
for DllPath in DllPathList:
os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]
os.environ["MAKE_FLAGS"] = MakeFlags
@ -3303,7 +3303,7 @@ class ModuleAutoGen(AutoGen):
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
RetVal[AutoFile] = str(StringH)
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":
if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
RetVal[AutoFile] = UniStringBinBuffer.getvalue()
AutoFile.IsBinary = True
@ -3314,7 +3314,7 @@ class ModuleAutoGen(AutoGen):
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
RetVal[AutoFile] = str(StringIdf)
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":
if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
AutoFile.IsBinary = True
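
Note: BytesIO.getvalue() returns bytes in Python 3, and bytes never compare equal to str, so the emptiness checks above compare against b"" rather than "". For example:

    from io import BytesIO

    buf = BytesIO()
    print(buf.getvalue() == b"")    # True: an untouched buffer yields empty bytes
    buf.write(b"\x01\x02")
    print(buf.getvalue() != b"")    # True once data has been written
    print(buf.getvalue() == "")     # False: bytes and str are never equal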
@ -3532,7 +3532,7 @@ class ModuleAutoGen(AutoGen):
EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
# Use a instance of BytesIO to cache data
fStringIO = BytesIO('')
fStringIO = BytesIO()
for Item in VfrUniOffsetList:
if (Item[0].find("Strings") != -1):
@ -3541,9 +3541,8 @@ class ModuleAutoGen(AutoGen):
# GUID + Offset
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
#
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
fStringIO.write(''.join(UniGuid))
UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
fStringIO.write(UniGuid)
UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue)
else:
@ -3552,9 +3551,8 @@ class ModuleAutoGen(AutoGen):
# GUID + Offset
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
#
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
fStringIO.write(''.join(VfrGuid))
VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
fStringIO.write(VfrGuid)
VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue)
#
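
Note: the GUID byte lists used to go through chr() and ''.join(), which produces a str in Python 3 and cannot be written into a BytesIO. The replacement bytes literals encode the same 16 bytes; a quick check with the values from the hunk above:

    UniGuid = bytes([0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d,
                     0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x06, 0x66])
    # The repr-style literal used in the patch is the same 16-byte value.
    print(UniGuid == b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f')   # True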
@ -4095,29 +4093,29 @@ class ModuleAutoGen(AutoGen):
GlobalData.gModuleHash[self.Arch] = {}
m = hashlib.md5()
# Add Platform level hash
m.update(GlobalData.gPlatformHash)
m.update(GlobalData.gPlatformHash.encode('utf-8'))
# Add Package level hash
if self.DependentPackageList:
for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:
m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName])
m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName].encode('utf-8'))
# Add Library hash
if self.LibraryAutoGenList:
for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
Lib.GenModuleHash()
m.update(GlobalData.gModuleHash[self.Arch][Lib.Name])
m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
# Add Module self
f = open(str(self.MetaFile), 'r')
f = open(str(self.MetaFile), 'rb')
Content = f.read()
f.close()
m.update(Content)
# Add Module's source files
if self.SourceFileList:
for File in sorted(self.SourceFileList, key=lambda x: str(x)):
f = open(str(File), 'r')
f = open(str(File), 'rb')
Content = f.read()
f.close()
m.update(Content)
@ -4128,7 +4126,7 @@ class ModuleAutoGen(AutoGen):
if GlobalData.gBinCacheSource:
if self.AttemptModuleCacheCopy():
return False
return SaveFileOnChange(ModuleHashFile, m.hexdigest(), True)
return SaveFileOnChange(ModuleHashFile, m.hexdigest(), False)
## Decide whether we can skip the ModuleAutoGen process
def CanSkipbyHash(self):


@ -1782,7 +1782,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
TempBuffer += Buffer
elif File.Ext.upper() == '.JPG':
ImageType, = struct.unpack('4s', Buffer[6:10])
if ImageType != 'JFIF':
if ImageType != b'JFIF':
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)
TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)
TempBuffer += pack('I', len(Buffer))
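
Note: struct.unpack() with an 's' format code returns bytes under Python 3, so the JPEG signature check must compare against a bytes literal. A small illustration with a typical JFIF header prefix:

    import struct

    jpeg_header = b'\xff\xd8\xff\xe0\x00\x10JFIF\x00'
    image_type, = struct.unpack('4s', jpeg_header[6:10])
    print(image_type)             # b'JFIF'
    print(image_type == 'JFIF')   # False: bytes never equal str
    print(image_type == b'JFIF')  # True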
@ -1882,7 +1882,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
ImageType, = struct.unpack('2s', Buffer[0:2])
if ImageType!= 'BM': # BMP file type is 'BM'
if ImageType!= b'BM': # BMP file type is 'BM'
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)
BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])
BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')
@ -1954,7 +1954,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
for Index in range(0, len(PaletteBuffer)):
if Index % 4 == 3:
continue
PaletteTemp += PaletteBuffer[Index]
PaletteTemp += PaletteBuffer[Index:Index+1]
PaletteBuffer = PaletteTemp[1:]
return ImageBuffer, PaletteBuffer
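
Note: indexing a bytes object in Python 3 returns an int, which cannot be appended to a bytes buffer, so the palette copy now takes one-byte slices instead. For example:

    data = b'\x01\x02\x03\x04'
    print(data[1])       # 2 (an int)
    print(data[1:2])     # b'\x02' (still bytes)

    out = bytearray()
    for index in range(len(data)):
        if index % 4 == 3:            # skip every fourth byte, as the palette loop does
            continue
        out += data[index:index + 1]  # slicing preserves the bytes type
    print(bytes(out))                 # b'\x01\x02\x03'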


@ -1038,17 +1038,21 @@ cleanlib:
CurrentFileDependencyList = DepDb[F]
else:
try:
Fd = open(F.Path, 'r')
Fd = open(F.Path, 'rb')
FileContent = Fd.read()
Fd.close()
except BaseException as X:
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))
FileContent = Fd.read()
Fd.close()
if len(FileContent) == 0:
continue
if FileContent[0] == 0xff or FileContent[0] == 0xfe:
FileContent = unicode(FileContent, "utf-16")
FileContent = FileContent.decode('utf-16')
else:
try:
FileContent = str(FileContent)
except:
pass
IncludedFileList = gIncludePattern.findall(FileContent)
for Inc in IncludedFileList:
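
Note: with the source file now read in 'rb' mode, FileContent is bytes: indexing it yields ints, so the byte-order-mark test compares against 0xff/0xfe directly, and UTF-16 content is converted with bytes.decode() instead of the removed unicode() builtin. For example:

    content = 'include "Foo.h"'.encode('utf-16')    # encoding prepends a BOM (0xff 0xfe or 0xfe 0xff)
    if content[0] in (0xff, 0xfe):                  # indexing bytes gives ints in Python 3
        text = content.decode('utf-16')
    else:
        text = content.decode('ascii', errors='ignore')  # hypothetical fallback for plain sources
    print(text)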


@ -295,7 +295,7 @@ class DbItemList:
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
Buffer = ''
Buffer = bytearray()
for Datas in self.RawDataList:
if type(Datas) in (list, tuple):
for Data in Datas:
@ -320,7 +320,7 @@ class DbExMapTblItemList (DbItemList):
DbItemList.__init__(self, ItemSize, DataList, RawDataList)
def PackData(self):
Buffer = ''
Buffer = bytearray()
PackStr = "=LHH"
for Datas in self.RawDataList:
Buffer += pack(PackStr,
@ -369,7 +369,7 @@ class DbComItemList (DbItemList):
def PackData(self):
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
Buffer = ''
Buffer = bytearray()
for DataList in self.RawDataList:
for Data in DataList:
if type(Data) in (list, tuple):
@ -390,7 +390,7 @@ class DbVariableTableItemList (DbComItemList):
def PackData(self):
PackStr = "=LLHHLHH"
Buffer = ''
Buffer = bytearray()
for DataList in self.RawDataList:
for Data in DataList:
Buffer += pack(PackStr,
@ -451,7 +451,7 @@ class DbSkuHeadTableItemList (DbItemList):
def PackData(self):
PackStr = "=LL"
Buffer = ''
Buffer = bytearray()
for Data in self.RawDataList:
Buffer += pack(PackStr,
GetIntegerValue(Data[0]),
@ -473,7 +473,7 @@ class DbSizeTableItemList (DbItemList):
return length * self.ItemSize
def PackData(self):
PackStr = "=H"
Buffer = ''
Buffer = bytearray()
for Data in self.RawDataList:
Buffer += pack(PackStr,
GetIntegerValue(Data[0]))
@ -853,8 +853,9 @@ def BuildExDataBase(Dict):
Index = 0
for Item in DbItemTotal:
Index +=1
b = Item.PackData()
Buffer += b
packdata = Item.PackData()
for i in range(len(packdata)):
Buffer += packdata[i:i + 1]
if Index == InitTableNum:
if len(Buffer) % 8:
for num in range(8 - len(Buffer) % 8):
@ -921,9 +922,9 @@ def CreatePcdDataBase(PcdDBData):
totallenbuff = pack("=L", totallen)
newbuffer = databasebuff[:32]
for i in range(4):
newbuffer += totallenbuff[i]
newbuffer += totallenbuff[i:i+1]
for i in range(36, totallen):
newbuffer += databasebuff[i]
newbuffer += databasebuff[i:i+1]
return newbuffer
@ -965,8 +966,8 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
for skuname, skuid in DynamicPcdSet_Sku:
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)
final_data = ()
for item in PcdDbBuffer:
final_data += unpack("B", item)
for item in range(len(PcdDbBuffer)):
final_data += unpack("B", PcdDbBuffer[item:item+1])
PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)
PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)
VarCheckTableData[(skuname, skuid)] = VarCheckTab
@ -978,8 +979,8 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
else:
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)
final_data = ()
for item in PcdDbBuffer:
final_data += unpack("B", item)
for item in range(len(PcdDbBuffer)):
final_data += unpack("B", PcdDbBuffer[item:item + 1])
PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)
return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)
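
Note: the database builders switch their accumulators from '' to bytearray() because struct.pack() returns bytes, which cannot be concatenated onto a str; a bytearray accepts += from pack(), and one-byte slices keep unpack() working when the buffer is re-read. A minimal sketch:

    from struct import pack, unpack

    buffer = bytearray()
    buffer += pack("=LHH", 0x1234, 1, 2)
    buffer += pack("=H", 0xBEEF)

    # Re-reading the buffer one byte at a time uses slices so unpack() always sees bytes.
    final_data = ()
    for i in range(len(buffer)):
        final_data += unpack("B", buffer[i:i + 1])
    print(bytes(buffer), final_data)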


@ -73,8 +73,9 @@ class VariableMgr(object):
fisrtdata_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[firstdata_type]]
fisrtdata = fisrtvalue_list[0]
fisrtvalue_list = []
for data_byte in pack(fisrtdata_flag, int(fisrtdata, 16) if fisrtdata.upper().startswith('0X') else int(fisrtdata)):
fisrtvalue_list.append(hex(unpack("B", data_byte)[0]))
pack_data = pack(fisrtdata_flag, int(fisrtdata, 0))
for data_byte in range(len(pack_data)):
fisrtvalue_list.append(hex(unpack("B", pack_data[data_byte:data_byte + 1])[0]))
newvalue_list = ["0x00"] * FirstOffset + fisrtvalue_list
for var_item in sku_var_info_offset_list[1:]:
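
Note: int(value, 0) infers the base from the string's prefix, replacing the manual '0X' check, and the packed bytes are walked with one-byte slices so unpack() always receives bytes. For example:

    from struct import pack, unpack

    for text in ("0x1F", "31"):
        packed = pack("B", int(text, 0))   # base 0 accepts hex and decimal strings alike
        as_hex = [hex(unpack("B", packed[i:i + 1])[0]) for i in range(len(packed))]
        print(text, as_hex)                # both print ['0x1f']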
@ -85,8 +86,9 @@ class VariableMgr(object):
data_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[Curdata_type]]
data = CurvalueList[0]
CurvalueList = []
for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):
CurvalueList.append(hex(unpack("B", data_byte)[0]))
pack_data = pack(data_flag, int(data, 0))
for data_byte in range(len(pack_data)):
CurvalueList.append(hex(unpack("B", pack_data[data_byte:data_byte + 1])[0]))
if CurOffset > len(newvalue_list):
newvalue_list = newvalue_list + ["0x00"] * (CurOffset - len(newvalue_list)) + CurvalueList
else:
@ -123,8 +125,8 @@ class VariableMgr(object):
default_data_buffer = VariableMgr.PACK_VARIABLES_DATA(default_sku_default.default_value, default_sku_default.data_type, tail)
default_data_array = ()
for item in default_data_buffer:
default_data_array += unpack("B", item)
for item in range(len(default_data_buffer)):
default_data_array += unpack("B", default_data_buffer[item:item + 1])
var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])
@ -141,8 +143,8 @@ class VariableMgr(object):
others_data_buffer = VariableMgr.PACK_VARIABLES_DATA(other_sku_other.default_value, other_sku_other.data_type, tail)
others_data_array = ()
for item in others_data_buffer:
others_data_array += unpack("B", item)
for item in range(len(others_data_buffer)):
others_data_array += unpack("B", others_data_buffer[item:item + 1])
data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)
@ -158,7 +160,7 @@ class VariableMgr(object):
return []
pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})
NvStoreDataBuffer = ""
NvStoreDataBuffer = bytearray()
var_data_offset = collections.OrderedDict()
offset = NvStorageHeaderSize
for default_data, default_info in pcds_default_data.values():
@ -185,7 +187,7 @@ class VariableMgr(object):
nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8)
data_delta_structure_buffer = ""
data_delta_structure_buffer = bytearray()
for skuname, defaultstore in var_data:
if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
continue
@ -216,8 +218,8 @@ class VariableMgr(object):
@staticmethod
def unpack_data(data):
final_data = ()
for item in data:
final_data += unpack("B", item)
for item in range(len(data)):
final_data += unpack("B", data[item:item + 1])
return final_data
@staticmethod
@ -285,7 +287,7 @@ class VariableMgr(object):
@staticmethod
def PACK_VARIABLES_DATA(var_value,data_type, tail = None):
Buffer = ""
Buffer = bytearray()
data_len = 0
if data_type == DataType.TAB_VOID:
for value_char in var_value.strip("{").strip("}").split(","):
@ -315,7 +317,7 @@ class VariableMgr(object):
@staticmethod
def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):
Buffer = ""
Buffer = bytearray()
Buffer += pack("=L", 4+8+8)
Buffer += pack("=Q", int(skuid))
Buffer += pack("=Q", int(defaultstoragename))
@ -340,7 +342,7 @@ class VariableMgr(object):
def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):
skuid = self.GetSkuId(skuname)
defaultstorageid = self.GetDefaultStoreId(defaultstoragename)
Buffer = ""
Buffer = bytearray()
Buffer += pack("=L", 4+8+8)
Buffer += pack("=Q", int(skuid))
Buffer += pack("=Q", int(defaultstorageid))
@ -363,7 +365,7 @@ class VariableMgr(object):
@staticmethod
def PACK_VARIABLE_NAME(var_name):
Buffer = ""
Buffer = bytearray()
for name_char in var_name.strip("{").strip("}").split(","):
Buffer += pack("=B", int(name_char, 16))


@ -34,7 +34,7 @@ class InfSectionParser():
SectionData = []
try:
FileLinesList = open(self._FilePath, "r", 0).readlines()
FileLinesList = open(self._FilePath, "r").readlines()
except BaseException:
EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % self._FilePath)
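
Note: Python 3 rejects buffering=0 in text mode (ValueError: can't have unbuffered text I/O), so the explicit 0 passed to open() is dropped. For example:

    try:
        open(__file__, "r", 0)            # unbuffered text mode is invalid in Python 3
    except ValueError as exc:
        print(exc)
    lines = open(__file__, "r").readlines()
    print(len(lines))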


@ -123,7 +123,10 @@ def DecToHexList(Dec, Digit = 8):
# @retval: A list for formatted hex string
#
def AscToHexList(Ascii):
return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]
try:
return ['0x{0:02X}'.format(Item) for Item in Ascii]
except:
return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]
## Create content of .h file
#
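
Note: AscToHexList may now be handed either bytes or str: iterating bytes yields ints that format directly with '{:02X}', while iterating str yields characters that need ord(), hence the try/except above. A standalone version of the pattern:

    def asc_to_hex_list(data):
        try:
            return ['0x{0:02X}'.format(item) for item in data]       # bytes: items are ints
        except (TypeError, ValueError):
            return ['0x{0:02X}'.format(ord(item)) for item in data]  # str: convert via ord()

    print(asc_to_hex_list(b'AB'))   # ['0x41', '0x42']
    print(asc_to_hex_list('AB'))    # ['0x41', '0x42']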


@ -24,7 +24,7 @@ from io import BytesIO
from Common.BuildToolError import *
from Common.StringUtils import GetLineNo
from Common.Misc import PathClass
from Common.LongFilePathSupport import LongFilePath, UniToStr
from Common.LongFilePathSupport import LongFilePath
from Common.GlobalData import *
##
# Static definitions
@ -427,7 +427,7 @@ class UniFileClassObject(object):
if EndPos != -1 and EndPos - StartPos == 6 :
if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE):
EndStr = Line[EndPos: ]
UniStr = ('\u' + (Line[StartPos + 2 : EndPos])).decode('unicode_escape')
UniStr = Line[StartPos + 2: EndPos]
if EndStr.startswith(u'\\x') and len(EndStr) >= 7:
if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE):
Line = Line[0 : StartPos] + UniStr + EndStr
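
Note: str objects have no decode() method in Python 3, so the old ('\u' + hexdigits).decode('unicode_escape') conversion is gone and the four hex digits are kept as-is. Were the actual code point needed, the Python 3 equivalent would be chr(int(hexdigits, 16)), e.g.:

    hexdigits = "00E9"
    print(chr(int(hexdigits, 16)))   # 'é'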


@ -41,7 +41,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
os.mkdir(dest)
BinFileName = "PcdVarCheck.bin"
BinFilePath = os.path.join(dest, BinFileName)
Buffer = ''
Buffer = bytearray()
index = 0
for var_check_tab in self.var_check_info:
index += 1