BaseTools: Handle the bytes and str difference

Handle the difference between bytes and str, remove unicode(),
and correct the open() file-mode parameters.
Use utcfromtimestamp instead of fromtimestamp.

Cc: Bob Feng <bob.c.feng@intel.com>
Cc: Liming Gao <liming.gao@intel.com>
Cc: Yonghong Zhu <yonghong.zhu@intel.com>
Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Zhiju.Fan <zhijux.fan@intel.com>
Tested-by: Laszlo Ersek <lersek@redhat.com>
Tested-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Reviewed-by: Liming Gao <liming.gao@intel.com>
Reviewed-by: Bob Feng <bob.c.feng@intel.com>
Feng, Bob C
2019-01-23 10:16:00 +08:00
parent f8d11e5a4a
commit d943b0c339
37 changed files with 247 additions and 244 deletions
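
The bulk of the patch follows one Python 3 rule: hashlib, struct, and binary I/O operate on bytes, while text files and hexdigest() results are str. A minimal sketch of that pattern (not taken from the patch; the file used here is arbitrary):

import hashlib

# Meta files are now opened in binary mode ('rb'): read() returns bytes,
# which is what hashlib.update() requires under Python 3.
m = hashlib.md5()
with open(__file__, 'rb') as f:
    m.update(f.read())

# hexdigest() returns str, so it is encoded before being folded into
# another hash (the platform/package/module hash chain) and is written
# out as text, hence SaveFileOnChange(..., False) at the call sites.
m2 = hashlib.md5()
m2.update(m.hexdigest().encode('utf-8'))
print(m.hexdigest(), m2.hexdigest())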

View File

@ -726,11 +726,11 @@ class WorkspaceAutoGen(AutoGen):
for files in AllWorkSpaceMetaFiles: for files in AllWorkSpaceMetaFiles:
if files.endswith('.dec'): if files.endswith('.dec'):
continue continue
f = open(files, 'r') f = open(files, 'rb')
Content = f.read() Content = f.read()
f.close() f.close()
m.update(Content) m.update(Content)
SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), True) SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), False)
GlobalData.gPlatformHash = m.hexdigest() GlobalData.gPlatformHash = m.hexdigest()
# #
@ -755,7 +755,7 @@ class WorkspaceAutoGen(AutoGen):
HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash') HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')
m = hashlib.md5() m = hashlib.md5()
# Get .dec file's hash value # Get .dec file's hash value
f = open(Pkg.MetaFile.Path, 'r') f = open(Pkg.MetaFile.Path, 'rb')
Content = f.read() Content = f.read()
f.close() f.close()
m.update(Content) m.update(Content)
@ -765,11 +765,11 @@ class WorkspaceAutoGen(AutoGen):
for Root, Dirs, Files in os.walk(str(inc)): for Root, Dirs, Files in os.walk(str(inc)):
for File in sorted(Files): for File in sorted(Files):
File_Path = os.path.join(Root, File) File_Path = os.path.join(Root, File)
f = open(File_Path, 'r') f = open(File_Path, 'rb')
Content = f.read() Content = f.read()
f.close() f.close()
m.update(Content) m.update(Content)
SaveFileOnChange(HashFile, m.hexdigest(), True) SaveFileOnChange(HashFile, m.hexdigest(), False)
GlobalData.gPackageHash[Pkg.Arch][Pkg.PackageName] = m.hexdigest() GlobalData.gPackageHash[Pkg.Arch][Pkg.PackageName] = m.hexdigest()
def _GetMetaFiles(self, Target, Toolchain, Arch): def _GetMetaFiles(self, Target, Toolchain, Arch):
@ -1736,7 +1736,7 @@ class PlatformAutoGen(AutoGen):
for pcd in self._DynamicPcdList: for pcd in self._DynamicPcdList:
if len(pcd.SkuInfoList) == 1: if len(pcd.SkuInfoList) == 1:
for (SkuName, SkuId) in allskuset: for (SkuName, SkuId) in allskuset:
if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0: if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:
continue continue
pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT]) pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])
pcd.SkuInfoList[SkuName].SkuId = SkuId pcd.SkuInfoList[SkuName].SkuId = SkuId
@ -1906,7 +1906,7 @@ class PlatformAutoGen(AutoGen):
ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value) ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)
ToolsDef += "\n" ToolsDef += "\n"
SaveFileOnChange(self.ToolDefinitionFile, ToolsDef) SaveFileOnChange(self.ToolDefinitionFile, ToolsDef, False)
for DllPath in DllPathList: for DllPath in DllPathList:
os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"] os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]
os.environ["MAKE_FLAGS"] = MakeFlags os.environ["MAKE_FLAGS"] = MakeFlags
@ -3303,7 +3303,7 @@ class ModuleAutoGen(AutoGen):
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir) AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
RetVal[AutoFile] = str(StringH) RetVal[AutoFile] = str(StringH)
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE) self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "": if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir) AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
RetVal[AutoFile] = UniStringBinBuffer.getvalue() RetVal[AutoFile] = UniStringBinBuffer.getvalue()
AutoFile.IsBinary = True AutoFile.IsBinary = True
@ -3314,7 +3314,7 @@ class ModuleAutoGen(AutoGen):
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir) AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
RetVal[AutoFile] = str(StringIdf) RetVal[AutoFile] = str(StringIdf)
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE) self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "": if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir) AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
RetVal[AutoFile] = IdfGenBinBuffer.getvalue() RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
AutoFile.IsBinary = True AutoFile.IsBinary = True
@ -3532,7 +3532,7 @@ class ModuleAutoGen(AutoGen):
EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None) EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
# Use a instance of BytesIO to cache data # Use a instance of BytesIO to cache data
fStringIO = BytesIO('') fStringIO = BytesIO()
for Item in VfrUniOffsetList: for Item in VfrUniOffsetList:
if (Item[0].find("Strings") != -1): if (Item[0].find("Strings") != -1):
@ -3541,9 +3541,8 @@ class ModuleAutoGen(AutoGen):
# GUID + Offset # GUID + Offset
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } } # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
# #
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66] UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid] fStringIO.write(UniGuid)
fStringIO.write(''.join(UniGuid))
UniValue = pack ('Q', int (Item[1], 16)) UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue) fStringIO.write (UniValue)
else: else:
@ -3552,9 +3551,8 @@ class ModuleAutoGen(AutoGen):
# GUID + Offset # GUID + Offset
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } }; # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
# #
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2] VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid] fStringIO.write(VfrGuid)
fStringIO.write(''.join(VfrGuid))
VfrValue = pack ('Q', int (Item[1], 16)) VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue) fStringIO.write (VfrValue)
# #
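
The GUID lists that were previously joined with chr() are replaced above by bytes literals, which BytesIO.write() accepts directly in Python 3. A quick illustrative check (not part of the patch) that the literal is the little-endian encoding of the commented GUID:

import uuid

UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
# Same value as { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
assert uuid.UUID(bytes_le=UniGuid) == uuid.UUID('8913c5e0-33f6-4d86-9bf1-43ef89fc0666')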
@ -4095,29 +4093,29 @@ class ModuleAutoGen(AutoGen):
GlobalData.gModuleHash[self.Arch] = {} GlobalData.gModuleHash[self.Arch] = {}
m = hashlib.md5() m = hashlib.md5()
# Add Platform level hash # Add Platform level hash
m.update(GlobalData.gPlatformHash) m.update(GlobalData.gPlatformHash.encode('utf-8'))
# Add Package level hash # Add Package level hash
if self.DependentPackageList: if self.DependentPackageList:
for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName): for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]: if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:
m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName]) m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName].encode('utf-8'))
# Add Library hash # Add Library hash
if self.LibraryAutoGenList: if self.LibraryAutoGenList:
for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name): for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
if Lib.Name not in GlobalData.gModuleHash[self.Arch]: if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
Lib.GenModuleHash() Lib.GenModuleHash()
m.update(GlobalData.gModuleHash[self.Arch][Lib.Name]) m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
# Add Module self # Add Module self
f = open(str(self.MetaFile), 'r') f = open(str(self.MetaFile), 'rb')
Content = f.read() Content = f.read()
f.close() f.close()
m.update(Content) m.update(Content)
# Add Module's source files # Add Module's source files
if self.SourceFileList: if self.SourceFileList:
for File in sorted(self.SourceFileList, key=lambda x: str(x)): for File in sorted(self.SourceFileList, key=lambda x: str(x)):
f = open(str(File), 'r') f = open(str(File), 'rb')
Content = f.read() Content = f.read()
f.close() f.close()
m.update(Content) m.update(Content)
@ -4128,7 +4126,7 @@ class ModuleAutoGen(AutoGen):
if GlobalData.gBinCacheSource: if GlobalData.gBinCacheSource:
if self.AttemptModuleCacheCopy(): if self.AttemptModuleCacheCopy():
return False return False
return SaveFileOnChange(ModuleHashFile, m.hexdigest(), True) return SaveFileOnChange(ModuleHashFile, m.hexdigest(), False)
## Decide whether we can skip the ModuleAutoGen process ## Decide whether we can skip the ModuleAutoGen process
def CanSkipbyHash(self): def CanSkipbyHash(self):

View File

@ -1782,7 +1782,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
TempBuffer += Buffer TempBuffer += Buffer
elif File.Ext.upper() == '.JPG': elif File.Ext.upper() == '.JPG':
ImageType, = struct.unpack('4s', Buffer[6:10]) ImageType, = struct.unpack('4s', Buffer[6:10])
if ImageType != 'JFIF': if ImageType != b'JFIF':
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path) EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)
TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG) TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)
TempBuffer += pack('I', len(Buffer)) TempBuffer += pack('I', len(Buffer))
@ -1882,7 +1882,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent): def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
ImageType, = struct.unpack('2s', Buffer[0:2]) ImageType, = struct.unpack('2s', Buffer[0:2])
if ImageType!= 'BM': # BMP file type is 'BM' if ImageType!= b'BM': # BMP file type is 'BM'
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path) EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)
BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant']) BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])
BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII') BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')
@ -1954,7 +1954,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
for Index in range(0, len(PaletteBuffer)): for Index in range(0, len(PaletteBuffer)):
if Index % 4 == 3: if Index % 4 == 3:
continue continue
PaletteTemp += PaletteBuffer[Index] PaletteTemp += PaletteBuffer[Index:Index+1]
PaletteBuffer = PaletteTemp[1:] PaletteBuffer = PaletteTemp[1:]
return ImageBuffer, PaletteBuffer return ImageBuffer, PaletteBuffer

View File

@ -1038,17 +1038,21 @@ cleanlib:
CurrentFileDependencyList = DepDb[F] CurrentFileDependencyList = DepDb[F]
else: else:
try: try:
Fd = open(F.Path, 'r') Fd = open(F.Path, 'rb')
FileContent = Fd.read()
Fd.close()
except BaseException as X: except BaseException as X:
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X)) EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))
FileContent = Fd.read()
Fd.close()
if len(FileContent) == 0: if len(FileContent) == 0:
continue continue
if FileContent[0] == 0xff or FileContent[0] == 0xfe: if FileContent[0] == 0xff or FileContent[0] == 0xfe:
FileContent = unicode(FileContent, "utf-16") FileContent = FileContent.decode('utf-16')
else:
try:
FileContent = str(FileContent)
except:
pass
IncludedFileList = gIncludePattern.findall(FileContent) IncludedFileList = gIncludePattern.findall(FileContent)
for Inc in IncludedFileList: for Inc in IncludedFileList:

View File

@ -295,7 +295,7 @@ class DbItemList:
PackStr = PACK_CODE_BY_SIZE[self.ItemSize] PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
Buffer = '' Buffer = bytearray()
for Datas in self.RawDataList: for Datas in self.RawDataList:
if type(Datas) in (list, tuple): if type(Datas) in (list, tuple):
for Data in Datas: for Data in Datas:
@ -320,7 +320,7 @@ class DbExMapTblItemList (DbItemList):
DbItemList.__init__(self, ItemSize, DataList, RawDataList) DbItemList.__init__(self, ItemSize, DataList, RawDataList)
def PackData(self): def PackData(self):
Buffer = '' Buffer = bytearray()
PackStr = "=LHH" PackStr = "=LHH"
for Datas in self.RawDataList: for Datas in self.RawDataList:
Buffer += pack(PackStr, Buffer += pack(PackStr,
@ -369,7 +369,7 @@ class DbComItemList (DbItemList):
def PackData(self): def PackData(self):
PackStr = PACK_CODE_BY_SIZE[self.ItemSize] PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
Buffer = '' Buffer = bytearray()
for DataList in self.RawDataList: for DataList in self.RawDataList:
for Data in DataList: for Data in DataList:
if type(Data) in (list, tuple): if type(Data) in (list, tuple):
@ -390,7 +390,7 @@ class DbVariableTableItemList (DbComItemList):
def PackData(self): def PackData(self):
PackStr = "=LLHHLHH" PackStr = "=LLHHLHH"
Buffer = '' Buffer = bytearray()
for DataList in self.RawDataList: for DataList in self.RawDataList:
for Data in DataList: for Data in DataList:
Buffer += pack(PackStr, Buffer += pack(PackStr,
@ -451,7 +451,7 @@ class DbSkuHeadTableItemList (DbItemList):
def PackData(self): def PackData(self):
PackStr = "=LL" PackStr = "=LL"
Buffer = '' Buffer = bytearray()
for Data in self.RawDataList: for Data in self.RawDataList:
Buffer += pack(PackStr, Buffer += pack(PackStr,
GetIntegerValue(Data[0]), GetIntegerValue(Data[0]),
@ -473,7 +473,7 @@ class DbSizeTableItemList (DbItemList):
return length * self.ItemSize return length * self.ItemSize
def PackData(self): def PackData(self):
PackStr = "=H" PackStr = "=H"
Buffer = '' Buffer = bytearray()
for Data in self.RawDataList: for Data in self.RawDataList:
Buffer += pack(PackStr, Buffer += pack(PackStr,
GetIntegerValue(Data[0])) GetIntegerValue(Data[0]))
@ -853,8 +853,9 @@ def BuildExDataBase(Dict):
Index = 0 Index = 0
for Item in DbItemTotal: for Item in DbItemTotal:
Index +=1 Index +=1
b = Item.PackData() packdata = Item.PackData()
Buffer += b for i in range(len(packdata)):
Buffer += packdata[i:i + 1]
if Index == InitTableNum: if Index == InitTableNum:
if len(Buffer) % 8: if len(Buffer) % 8:
for num in range(8 - len(Buffer) % 8): for num in range(8 - len(Buffer) % 8):
@ -921,9 +922,9 @@ def CreatePcdDataBase(PcdDBData):
totallenbuff = pack("=L", totallen) totallenbuff = pack("=L", totallen)
newbuffer = databasebuff[:32] newbuffer = databasebuff[:32]
for i in range(4): for i in range(4):
newbuffer += totallenbuff[i] newbuffer += totallenbuff[i:i+1]
for i in range(36, totallen): for i in range(36, totallen):
newbuffer += databasebuff[i] newbuffer += databasebuff[i:i+1]
return newbuffer return newbuffer
@ -965,8 +966,8 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
for skuname, skuid in DynamicPcdSet_Sku: for skuname, skuid in DynamicPcdSet_Sku:
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase) AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)
final_data = () final_data = ()
for item in PcdDbBuffer: for item in range(len(PcdDbBuffer)):
final_data += unpack("B", item) final_data += unpack("B", PcdDbBuffer[item:item+1])
PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data) PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)
PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC) PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)
VarCheckTableData[(skuname, skuid)] = VarCheckTab VarCheckTableData[(skuname, skuid)] = VarCheckTab
@ -978,8 +979,8 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
else: else:
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase) AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)
final_data = () final_data = ()
for item in PcdDbBuffer: for item in range(len(PcdDbBuffer)):
final_data += unpack("B", item) final_data += unpack("B", PcdDbBuffer[item:item + 1])
PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data) PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)
return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData) return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)
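
The [i:i + 1] slices above are the Python 3 way to take a single byte as a length-1 bytes object; plain indexing of bytes yields an int, which pack/unpack and bytes concatenation cannot consume. A small self-contained illustration (values are arbitrary):

from struct import pack, unpack

buf = pack("=LHH", 1, 2, 3)        # bytes, as PackData now builds with bytearray()

first_as_int = buf[0]              # indexing gives an int in Python 3
first_as_bytes = buf[0:1]          # slicing keeps it as bytes: b'\x01'
assert unpack("B", first_as_bytes)[0] == first_as_int

# Rebuilding a buffer byte by byte therefore also uses slices,
# so the accumulator stays a bytes/bytearray value.
rebuilt = bytearray()
for i in range(len(buf)):
    rebuilt += buf[i:i + 1]
assert bytes(rebuilt) == buf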

View File

@ -73,8 +73,9 @@ class VariableMgr(object):
fisrtdata_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[firstdata_type]] fisrtdata_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[firstdata_type]]
fisrtdata = fisrtvalue_list[0] fisrtdata = fisrtvalue_list[0]
fisrtvalue_list = [] fisrtvalue_list = []
for data_byte in pack(fisrtdata_flag, int(fisrtdata, 16) if fisrtdata.upper().startswith('0X') else int(fisrtdata)): pack_data = pack(fisrtdata_flag, int(fisrtdata, 0))
fisrtvalue_list.append(hex(unpack("B", data_byte)[0])) for data_byte in range(len(pack_data)):
fisrtvalue_list.append(hex(unpack("B", pack_data[data_byte:data_byte + 1])[0]))
newvalue_list = ["0x00"] * FirstOffset + fisrtvalue_list newvalue_list = ["0x00"] * FirstOffset + fisrtvalue_list
for var_item in sku_var_info_offset_list[1:]: for var_item in sku_var_info_offset_list[1:]:
@ -85,8 +86,9 @@ class VariableMgr(object):
data_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[Curdata_type]] data_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[Curdata_type]]
data = CurvalueList[0] data = CurvalueList[0]
CurvalueList = [] CurvalueList = []
for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)): pack_data = pack(data_flag, int(data, 0))
CurvalueList.append(hex(unpack("B", data_byte)[0])) for data_byte in range(len(pack_data)):
CurvalueList.append(hex(unpack("B", pack_data[data_byte:data_byte + 1])[0]))
if CurOffset > len(newvalue_list): if CurOffset > len(newvalue_list):
newvalue_list = newvalue_list + ["0x00"] * (CurOffset - len(newvalue_list)) + CurvalueList newvalue_list = newvalue_list + ["0x00"] * (CurOffset - len(newvalue_list)) + CurvalueList
else: else:
@ -123,8 +125,8 @@ class VariableMgr(object):
default_data_buffer = VariableMgr.PACK_VARIABLES_DATA(default_sku_default.default_value, default_sku_default.data_type, tail) default_data_buffer = VariableMgr.PACK_VARIABLES_DATA(default_sku_default.default_value, default_sku_default.data_type, tail)
default_data_array = () default_data_array = ()
for item in default_data_buffer: for item in range(len(default_data_buffer)):
default_data_array += unpack("B", item) default_data_array += unpack("B", default_data_buffer[item:item + 1])
var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)]) var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])
@ -141,8 +143,8 @@ class VariableMgr(object):
others_data_buffer = VariableMgr.PACK_VARIABLES_DATA(other_sku_other.default_value, other_sku_other.data_type, tail) others_data_buffer = VariableMgr.PACK_VARIABLES_DATA(other_sku_other.default_value, other_sku_other.data_type, tail)
others_data_array = () others_data_array = ()
for item in others_data_buffer: for item in range(len(others_data_buffer)):
others_data_array += unpack("B", item) others_data_array += unpack("B", others_data_buffer[item:item + 1])
data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array) data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)
@ -158,7 +160,7 @@ class VariableMgr(object):
return [] return []
pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {}) pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})
NvStoreDataBuffer = "" NvStoreDataBuffer = bytearray()
var_data_offset = collections.OrderedDict() var_data_offset = collections.OrderedDict()
offset = NvStorageHeaderSize offset = NvStorageHeaderSize
for default_data, default_info in pcds_default_data.values(): for default_data, default_info in pcds_default_data.values():
@ -185,7 +187,7 @@ class VariableMgr(object):
nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8) nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8)
data_delta_structure_buffer = "" data_delta_structure_buffer = bytearray()
for skuname, defaultstore in var_data: for skuname, defaultstore in var_data:
if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT): if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
continue continue
@ -216,8 +218,8 @@ class VariableMgr(object):
@staticmethod @staticmethod
def unpack_data(data): def unpack_data(data):
final_data = () final_data = ()
for item in data: for item in range(len(data)):
final_data += unpack("B", item) final_data += unpack("B", data[item:item + 1])
return final_data return final_data
@staticmethod @staticmethod
@ -285,7 +287,7 @@ class VariableMgr(object):
@staticmethod @staticmethod
def PACK_VARIABLES_DATA(var_value,data_type, tail = None): def PACK_VARIABLES_DATA(var_value,data_type, tail = None):
Buffer = "" Buffer = bytearray()
data_len = 0 data_len = 0
if data_type == DataType.TAB_VOID: if data_type == DataType.TAB_VOID:
for value_char in var_value.strip("{").strip("}").split(","): for value_char in var_value.strip("{").strip("}").split(","):
@ -315,7 +317,7 @@ class VariableMgr(object):
@staticmethod @staticmethod
def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value): def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):
Buffer = "" Buffer = bytearray()
Buffer += pack("=L", 4+8+8) Buffer += pack("=L", 4+8+8)
Buffer += pack("=Q", int(skuid)) Buffer += pack("=Q", int(skuid))
Buffer += pack("=Q", int(defaultstoragename)) Buffer += pack("=Q", int(defaultstoragename))
@ -340,7 +342,7 @@ class VariableMgr(object):
def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list): def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):
skuid = self.GetSkuId(skuname) skuid = self.GetSkuId(skuname)
defaultstorageid = self.GetDefaultStoreId(defaultstoragename) defaultstorageid = self.GetDefaultStoreId(defaultstoragename)
Buffer = "" Buffer = bytearray()
Buffer += pack("=L", 4+8+8) Buffer += pack("=L", 4+8+8)
Buffer += pack("=Q", int(skuid)) Buffer += pack("=Q", int(skuid))
Buffer += pack("=Q", int(defaultstorageid)) Buffer += pack("=Q", int(defaultstorageid))
@ -363,7 +365,7 @@ class VariableMgr(object):
@staticmethod @staticmethod
def PACK_VARIABLE_NAME(var_name): def PACK_VARIABLE_NAME(var_name):
Buffer = "" Buffer = bytearray()
for name_char in var_name.strip("{").strip("}").split(","): for name_char in var_name.strip("{").strip("}").split(","):
Buffer += pack("=B", int(name_char, 16)) Buffer += pack("=B", int(name_char, 16))

View File

@ -34,7 +34,7 @@ class InfSectionParser():
SectionData = [] SectionData = []
try: try:
FileLinesList = open(self._FilePath, "r", 0).readlines() FileLinesList = open(self._FilePath, "r").readlines()
except BaseException: except BaseException:
EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % self._FilePath) EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % self._FilePath)

View File

@ -123,7 +123,10 @@ def DecToHexList(Dec, Digit = 8):
# @retval: A list for formatted hex string # @retval: A list for formatted hex string
# #
def AscToHexList(Ascii): def AscToHexList(Ascii):
return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii] try:
return ['0x{0:02X}'.format(Item) for Item in Ascii]
except:
return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]
## Create content of .h file ## Create content of .h file
# #
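
The try/except in AscToHexList above covers both input types: iterating bytes yields ints that format directly, while a str still needs ord(). A hedged stand-alone version of the same idea (the helper name is illustrative, not from the patch):

def asc_to_hex_list(data):
    # bytes iterate as ints under Python 3; str items need ord()
    try:
        return ['0x{0:02X}'.format(item) for item in data]
    except (TypeError, ValueError):
        return ['0x{0:02X}'.format(ord(item)) for item in data]

assert asc_to_hex_list(b'AB') == ['0x41', '0x42']
assert asc_to_hex_list('AB') == ['0x41', '0x42']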

View File

@ -24,7 +24,7 @@ from io import BytesIO
from Common.BuildToolError import * from Common.BuildToolError import *
from Common.StringUtils import GetLineNo from Common.StringUtils import GetLineNo
from Common.Misc import PathClass from Common.Misc import PathClass
from Common.LongFilePathSupport import LongFilePath, UniToStr from Common.LongFilePathSupport import LongFilePath
from Common.GlobalData import * from Common.GlobalData import *
## ##
# Static definitions # Static definitions
@ -427,7 +427,7 @@ class UniFileClassObject(object):
if EndPos != -1 and EndPos - StartPos == 6 : if EndPos != -1 and EndPos - StartPos == 6 :
if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE): if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE):
EndStr = Line[EndPos: ] EndStr = Line[EndPos: ]
UniStr = ('\u' + (Line[StartPos + 2 : EndPos])).decode('unicode_escape') UniStr = Line[StartPos + 2: EndPos]
if EndStr.startswith(u'\\x') and len(EndStr) >= 7: if EndStr.startswith(u'\\x') and len(EndStr) >= 7:
if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE): if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE):
Line = Line[0 : StartPos] + UniStr + EndStr Line = Line[0 : StartPos] + UniStr + EndStr

View File

@ -41,7 +41,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
os.mkdir(dest) os.mkdir(dest)
BinFileName = "PcdVarCheck.bin" BinFileName = "PcdVarCheck.bin"
BinFilePath = os.path.join(dest, BinFileName) BinFilePath = os.path.join(dest, BinFileName)
Buffer = '' Buffer = bytearray()
index = 0 index = 0
for var_check_tab in self.var_check_info: for var_check_tab in self.var_check_info:
index += 1 index += 1

View File

@ -186,7 +186,7 @@ class PcdEntry:
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW, EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno)) "PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
try: try:
self.PcdValue = pack('%ds' % Size, ValueString) self.PcdValue = pack('%ds' % Size, ValueString.encode('utf-8'))
except: except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno)) "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
@ -306,7 +306,7 @@ class GenVPD :
self.PcdFixedOffsetSizeList = [] self.PcdFixedOffsetSizeList = []
self.PcdUnknownOffsetList = [] self.PcdUnknownOffsetList = []
try: try:
fInputfile = open(InputFileName, "r", 0) fInputfile = open(InputFileName, "r")
try: try:
self.FileLinesList = fInputfile.readlines() self.FileLinesList = fInputfile.readlines()
except: except:
@ -645,7 +645,7 @@ class GenVPD :
#Open an VPD file to process #Open an VPD file to process
try: try:
fVpdFile = open(BinFileName, "wb", 0) fVpdFile = open(BinFileName, "wb")
except: except:
# Open failed # Open failed
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None) EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)
@ -657,7 +657,7 @@ class GenVPD :
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None) EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)
# Use a instance of BytesIO to cache data # Use a instance of BytesIO to cache data
fStringIO = BytesIO('') fStringIO = BytesIO()
# Write the header of map file. # Write the header of map file.
try : try :

View File

@ -15,7 +15,6 @@ from __future__ import absolute_import
import os import os
from . import LongFilePathOsPath from . import LongFilePathOsPath
from Common.LongFilePathSupport import LongFilePath from Common.LongFilePathSupport import LongFilePath
from Common.LongFilePathSupport import UniToStr
import time import time
path = LongFilePathOsPath path = LongFilePathOsPath
@ -64,7 +63,7 @@ def listdir(path):
List = [] List = []
uList = os.listdir(u"%s" % LongFilePath(path)) uList = os.listdir(u"%s" % LongFilePath(path))
for Item in uList: for Item in uList:
List.append(UniToStr(Item)) List.append(Item)
return List return List
environ = os.environ environ = os.environ

View File

@ -49,15 +49,3 @@ def CopyLongFilePath(src, dst):
with open(LongFilePath(src), 'rb') as fsrc: with open(LongFilePath(src), 'rb') as fsrc:
with open(LongFilePath(dst), 'wb') as fdst: with open(LongFilePath(dst), 'wb') as fdst:
shutil.copyfileobj(fsrc, fdst) shutil.copyfileobj(fsrc, fdst)
## Convert a python unicode string to a normal string
#
# Convert a python unicode string to a normal string
# UniToStr(u'I am a string') is 'I am a string'
#
# @param Uni: The python unicode string
#
# @retval: The formatted normal string
#
def UniToStr(Uni):
return repr(Uni)[2:-1]

View File

@ -456,15 +456,22 @@ def RemoveDirectory(Directory, Recursively=False):
# @retval False If the file content is the same # @retval False If the file content is the same
# #
def SaveFileOnChange(File, Content, IsBinaryFile=True): def SaveFileOnChange(File, Content, IsBinaryFile=True):
if not IsBinaryFile:
Content = Content.replace("\n", os.linesep)
if os.path.exists(File): if os.path.exists(File):
try: if IsBinaryFile:
if Content == open(File, "rb").read(): try:
return False with open(File, "rb") as f:
except: if Content == f.read():
EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File) return False
except:
EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File)
else:
try:
with open(File, "r") as f:
if Content == f.read():
return False
except:
EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File)
DirName = os.path.dirname(File) DirName = os.path.dirname(File)
if not CreateDirectory(DirName): if not CreateDirectory(DirName):
@ -475,12 +482,18 @@ def SaveFileOnChange(File, Content, IsBinaryFile=True):
if not os.access(DirName, os.W_OK): if not os.access(DirName, os.W_OK):
EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName) EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)
try: if IsBinaryFile:
Fd = open(File, "wb") try:
Fd.write(Content) with open(File, "wb") as Fd:
Fd.close() Fd.write(Content)
except IOError as X: except IOError as X:
EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X) EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
else:
try:
with open(File, 'w') as Fd:
Fd.write(Content)
except IOError as X:
EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
return True return True
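
SaveFileOnChange now branches on IsBinaryFile so that bytes content is compared and written in binary mode while str content goes through text mode. A condensed sketch of that split, without the EdkLogger error handling and linesep normalization of the real function:

import os

def save_file_on_change(path, content, is_binary=True):
    """Write content only when it differs from what is on disk.
    content must be bytes when is_binary is True, str otherwise."""
    read_mode, write_mode = ('rb', 'wb') if is_binary else ('r', 'w')
    if os.path.exists(path):
        with open(path, read_mode) as f:
            if f.read() == content:
                return False           # unchanged, skip the write
    with open(path, write_mode) as f:
        f.write(content)
    return True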
@ -1060,7 +1073,10 @@ def ParseFieldValue (Value):
if Value[0] == '"' and Value[-1] == '"': if Value[0] == '"' and Value[-1] == '"':
Value = Value[1:-1] Value = Value[1:-1]
try: try:
Value = "'" + uuid.UUID(Value).bytes_le + "'" Value = str(uuid.UUID(Value).bytes_le)
if Value.startswith("b'"):
Value = Value[2:-1]
Value = "'" + Value + "'"
except ValueError as Message: except ValueError as Message:
raise BadExpression(Message) raise BadExpression(Message)
Value, Size = ParseFieldValue(Value) Value, Size = ParseFieldValue(Value)
@ -1536,7 +1552,7 @@ class PeImageClass():
ByteArray = array.array('B') ByteArray = array.array('B')
ByteArray.fromfile(PeObject, 4) ByteArray.fromfile(PeObject, 4)
# PE signature should be 'PE\0\0' # PE signature should be 'PE\0\0'
if ByteArray.tostring() != 'PE\0\0': if ByteArray.tostring() != b'PE\0\0':
self.ErrorInfo = self.FileName + ' has no valid PE signature PE00' self.ErrorInfo = self.FileName + ' has no valid PE signature PE00'
return return
@ -1752,7 +1768,7 @@ class SkuClass():
# @retval Value The integer value that the input represents # @retval Value The integer value that the input represents
# #
def GetIntegerValue(Input): def GetIntegerValue(Input):
if type(Input) in (int, long): if not isinstance(Input, str):
return Input return Input
String = Input String = Input
if String.endswith("U"): if String.endswith("U"):

View File

@ -816,11 +816,7 @@ def GetHelpTextList(HelpTextClassList):
return List return List
def StringToArray(String): def StringToArray(String):
if isinstance(String, unicode): if String.startswith('L"'):
if len(unicode) == 0:
return "{0x00,0x00}"
return "{%s,0x00,0x00}" % ",".join("0x%02x,0x00" % ord(C) for C in String)
elif String.startswith('L"'):
if String == "L\"\"": if String == "L\"\"":
return "{0x00,0x00}" return "{0x00,0x00}"
else: else:
@ -843,9 +839,7 @@ def StringToArray(String):
return '{%s,0,0}' % ','.join(String.split()) return '{%s,0,0}' % ','.join(String.split())
def StringArrayLength(String): def StringArrayLength(String):
if isinstance(String, unicode): if String.startswith('L"'):
return (len(String) + 1) * 2 + 1;
elif String.startswith('L"'):
return (len(String) - 3 + 1) * 2 return (len(String) - 3 + 1) * 2
elif String.startswith('"'): elif String.startswith('"'):
return (len(String) - 2 + 1) return (len(String) - 2 + 1)

View File

@ -92,18 +92,18 @@ class VpdInfoFile:
if (Vpd is None): if (Vpd is None):
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.") EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
if not (Offset >= 0 or Offset == TAB_STAR): if not (Offset >= "0" or Offset == TAB_STAR):
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset) EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
if Vpd.DatumType == TAB_VOID: if Vpd.DatumType == TAB_VOID:
if Vpd.MaxDatumSize <= 0: if Vpd.MaxDatumSize <= "0":
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName)) "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES: elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
if not Vpd.MaxDatumSize: if not Vpd.MaxDatumSize:
Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType] Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType]
else: else:
if Vpd.MaxDatumSize <= 0: if Vpd.MaxDatumSize <= "0":
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName)) "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
@ -127,7 +127,7 @@ class VpdInfoFile:
"Invalid parameter FilePath: %s." % FilePath) "Invalid parameter FilePath: %s." % FilePath)
Content = FILE_COMMENT_TEMPLATE Content = FILE_COMMENT_TEMPLATE
Pcds = sorted(self._VpdArray.keys()) Pcds = sorted(self._VpdArray.keys(), key=lambda x: x.TokenCName)
for Pcd in Pcds: for Pcd in Pcds:
i = 0 i = 0
PcdTokenCName = Pcd.TokenCName PcdTokenCName = Pcd.TokenCName
@ -249,7 +249,7 @@ def CallExtenalBPDGTool(ToolPath, VpdFileName):
except Exception as X: except Exception as X:
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData=str(X)) EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData=str(X))
(out, error) = PopenObject.communicate() (out, error) = PopenObject.communicate()
print(out) print(out.decode(encoding='utf-8', errors='ignore'))
while PopenObject.returncode is None : while PopenObject.returncode is None :
PopenObject.wait() PopenObject.wait()

View File

@ -53,7 +53,7 @@ class AprioriSection (object):
# @retval string Generated file name # @retval string Generated file name
# #
def GenFfs (self, FvName, Dict = {}, IsMakefile = False): def GenFfs (self, FvName, Dict = {}, IsMakefile = False):
Buffer = BytesIO('') Buffer = BytesIO()
if self.AprioriType == "PEI": if self.AprioriType == "PEI":
AprioriFileGuid = PEI_APRIORI_GUID AprioriFileGuid = PEI_APRIORI_GUID
else: else:

View File

@ -181,7 +181,7 @@ class Capsule (CapsuleClassObject):
# #
# The real capsule header structure is 28 bytes # The real capsule header structure is 28 bytes
# #
Header.write('\x00'*(HdrSize-28)) Header.write(b'\x00'*(HdrSize-28))
Header.write(FwMgrHdr.getvalue()) Header.write(FwMgrHdr.getvalue())
Header.write(Content.getvalue()) Header.write(Content.getvalue())
# #
@ -206,18 +206,17 @@ class Capsule (CapsuleClassObject):
return self.GenFmpCapsule() return self.GenFmpCapsule()
CapInfFile = self.GenCapInf() CapInfFile = self.GenCapInf()
CapInfFile.writelines("[files]" + TAB_LINE_BREAK) CapInfFile.append("[files]" + TAB_LINE_BREAK)
CapFileList = [] CapFileList = []
for CapsuleDataObj in self.CapsuleDataList: for CapsuleDataObj in self.CapsuleDataList:
CapsuleDataObj.CapsuleName = self.CapsuleName CapsuleDataObj.CapsuleName = self.CapsuleName
FileName = CapsuleDataObj.GenCapsuleSubItem() FileName = CapsuleDataObj.GenCapsuleSubItem()
CapsuleDataObj.CapsuleName = None CapsuleDataObj.CapsuleName = None
CapFileList.append(FileName) CapFileList.append(FileName)
CapInfFile.writelines("EFI_FILE_NAME = " + \ CapInfFile.append("EFI_FILE_NAME = " + \
FileName + \ FileName + \
TAB_LINE_BREAK) TAB_LINE_BREAK)
SaveFileOnChange(self.CapInfFileName, CapInfFile.getvalue(), False) SaveFileOnChange(self.CapInfFileName, ''.join(CapInfFile), False)
CapInfFile.close()
# #
# Call GenFv tool to generate capsule # Call GenFv tool to generate capsule
# #
@ -243,12 +242,12 @@ class Capsule (CapsuleClassObject):
def GenCapInf(self): def GenCapInf(self):
self.CapInfFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.CapInfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
self.UiCapsuleName + "_Cap" + '.inf') self.UiCapsuleName + "_Cap" + '.inf')
CapInfFile = BytesIO() #open (self.CapInfFileName , 'w+') CapInfFile = []
CapInfFile.writelines("[options]" + TAB_LINE_BREAK) CapInfFile.append("[options]" + TAB_LINE_BREAK)
for Item in self.TokensDict: for Item in self.TokensDict:
CapInfFile.writelines("EFI_" + \ CapInfFile.append("EFI_" + \
Item + \ Item + \
' = ' + \ ' = ' + \
self.TokensDict[Item] + \ self.TokensDict[Item] + \

View File

@ -82,7 +82,7 @@ class CapsuleFv (CapsuleData):
if self.FvName.find('.fv') == -1: if self.FvName.find('.fv') == -1:
if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict: if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[self.FvName.upper()] FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[self.FvName.upper()]
FdBuffer = BytesIO('') FdBuffer = BytesIO()
FvObj.CapsuleName = self.CapsuleName FvObj.CapsuleName = self.CapsuleName
FvFile = FvObj.AddToBuffer(FdBuffer) FvFile = FvObj.AddToBuffer(FdBuffer)
FvObj.CapsuleName = None FvObj.CapsuleName = None

View File

@ -72,7 +72,7 @@ class FD(FDClassObject):
HasCapsuleRegion = True HasCapsuleRegion = True
break break
if HasCapsuleRegion: if HasCapsuleRegion:
TempFdBuffer = BytesIO('') TempFdBuffer = BytesIO()
PreviousRegionStart = -1 PreviousRegionStart = -1
PreviousRegionSize = 1 PreviousRegionSize = 1
@ -101,7 +101,7 @@ class FD(FDClassObject):
GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function') GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict) RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)
FdBuffer = BytesIO('') FdBuffer = BytesIO()
PreviousRegionStart = -1 PreviousRegionStart = -1
PreviousRegionSize = 1 PreviousRegionSize = 1
for RegionObj in self.RegionList : for RegionObj in self.RegionList :

View File

@ -159,7 +159,7 @@ class IncludeFileProfile:
self.FileName = FileName self.FileName = FileName
self.FileLinesList = [] self.FileLinesList = []
try: try:
with open(FileName, "rb", 0) as fsock: with open(FileName, "r") as fsock:
self.FileLinesList = fsock.readlines() self.FileLinesList = fsock.readlines()
for index, line in enumerate(self.FileLinesList): for index, line in enumerate(self.FileLinesList):
if not line.endswith(TAB_LINE_BREAK): if not line.endswith(TAB_LINE_BREAK):
@ -213,7 +213,7 @@ class FileProfile:
def __init__(self, FileName): def __init__(self, FileName):
self.FileLinesList = [] self.FileLinesList = []
try: try:
with open(FileName, "rb", 0) as fsock: with open(FileName, "r") as fsock:
self.FileLinesList = fsock.readlines() self.FileLinesList = fsock.readlines()
except: except:

View File

@ -79,7 +79,7 @@ class FileStatement (FileStatementClassObject):
Dict.update(self.DefineVarDict) Dict.update(self.DefineVarDict)
SectionAlignments = None SectionAlignments = None
if self.FvName: if self.FvName:
Buffer = BytesIO('') Buffer = BytesIO()
if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict: if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName)) EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName))
Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper()) Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper())
@ -96,7 +96,7 @@ class FileStatement (FileStatementClassObject):
elif self.FileName: elif self.FileName:
if hasattr(self, 'FvFileType') and self.FvFileType == 'RAW': if hasattr(self, 'FvFileType') and self.FvFileType == 'RAW':
if isinstance(self.FileName, list) and isinstance(self.SubAlignment, list) and len(self.FileName) == len(self.SubAlignment): if isinstance(self.FileName, list) and isinstance(self.SubAlignment, list) and len(self.FileName) == len(self.SubAlignment):
FileContent = '' FileContent = BytesIO()
MaxAlignIndex = 0 MaxAlignIndex = 0
MaxAlignValue = 1 MaxAlignValue = 1
for Index, File in enumerate(self.FileName): for Index, File in enumerate(self.FileName):
@ -112,15 +112,15 @@ class FileStatement (FileStatementClassObject):
if AlignValue > MaxAlignValue: if AlignValue > MaxAlignValue:
MaxAlignIndex = Index MaxAlignIndex = Index
MaxAlignValue = AlignValue MaxAlignValue = AlignValue
FileContent += Content FileContent.write(Content)
if len(FileContent) % AlignValue != 0: if len(FileContent.getvalue()) % AlignValue != 0:
Size = AlignValue - len(FileContent) % AlignValue Size = AlignValue - len(FileContent.getvalue()) % AlignValue
for i in range(0, Size): for i in range(0, Size):
FileContent += pack('B', 0xFF) FileContent.write(pack('B', 0xFF))
if FileContent: if FileContent.getvalue() != b'':
OutputRAWFile = os.path.join(GenFdsGlobalVariable.FfsDir, self.NameGuid, self.NameGuid + '.raw') OutputRAWFile = os.path.join(GenFdsGlobalVariable.FfsDir, self.NameGuid, self.NameGuid + '.raw')
SaveFileOnChange(OutputRAWFile, FileContent, True) SaveFileOnChange(OutputRAWFile, FileContent.getvalue(), True)
self.FileName = OutputRAWFile self.FileName = OutputRAWFile
self.SubAlignment = self.SubAlignment[MaxAlignIndex] self.SubAlignment = self.SubAlignment[MaxAlignIndex]

View File

@ -1088,7 +1088,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
def __GenUniVfrOffsetFile(VfrUniOffsetList, UniVfrOffsetFileName): def __GenUniVfrOffsetFile(VfrUniOffsetList, UniVfrOffsetFileName):
# Use a instance of StringIO to cache data # Use a instance of StringIO to cache data
fStringIO = BytesIO('') fStringIO = BytesIO()
for Item in VfrUniOffsetList: for Item in VfrUniOffsetList:
if (Item[0].find("Strings") != -1): if (Item[0].find("Strings") != -1):
@ -1097,9 +1097,8 @@ class FfsInfStatement(FfsInfStatementClassObject):
# GUID + Offset # GUID + Offset
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } } # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
# #
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66] UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid] fStringIO.write(UniGuid)
fStringIO.write(''.join(UniGuid))
UniValue = pack ('Q', int (Item[1], 16)) UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue) fStringIO.write (UniValue)
else: else:
@ -1108,9 +1107,8 @@ class FfsInfStatement(FfsInfStatementClassObject):
# GUID + Offset # GUID + Offset
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } }; # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
# #
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2] VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid] fStringIO.write(VfrGuid)
fStringIO.write(''.join(VfrGuid))
type (Item[1]) type (Item[1])
VfrValue = pack ('Q', int (Item[1], 16)) VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue) fStringIO.write (VfrValue)

View File

@ -117,7 +117,7 @@ class FV (object):
FfsFileList.append(FileName) FfsFileList.append(FileName)
# Add Apriori file name to Inf file # Add Apriori file name to Inf file
if not Flag: if not Flag:
self.FvInfFile.writelines("EFI_FILE_NAME = " + \ self.FvInfFile.append("EFI_FILE_NAME = " + \
FileName + \ FileName + \
TAB_LINE_BREAK) TAB_LINE_BREAK)
@ -131,12 +131,12 @@ class FV (object):
FileName = FfsFile.GenFfs(MacroDict, FvParentAddr=BaseAddress, IsMakefile=Flag, FvName=self.UiFvName) FileName = FfsFile.GenFfs(MacroDict, FvParentAddr=BaseAddress, IsMakefile=Flag, FvName=self.UiFvName)
FfsFileList.append(FileName) FfsFileList.append(FileName)
if not Flag: if not Flag:
self.FvInfFile.writelines("EFI_FILE_NAME = " + \ self.FvInfFile.append("EFI_FILE_NAME = " + \
FileName + \ FileName + \
TAB_LINE_BREAK) TAB_LINE_BREAK)
if not Flag: if not Flag:
SaveFileOnChange(self.InfFileName, self.FvInfFile.getvalue(), False) FvInfFile = ''.join(self.FvInfFile)
self.FvInfFile.close() SaveFileOnChange(self.InfFileName, FvInfFile, False)
# #
# Call GenFv tool # Call GenFv tool
# #
@ -208,14 +208,14 @@ class FV (object):
# PI FvHeader is 0x48 byte # PI FvHeader is 0x48 byte
FvHeaderBuffer = FvFileObj.read(0x48) FvHeaderBuffer = FvFileObj.read(0x48)
Signature = FvHeaderBuffer[0x28:0x32] Signature = FvHeaderBuffer[0x28:0x32]
if Signature and Signature.startswith('_FVH'): if Signature and Signature.startswith(b'_FVH'):
GenFdsGlobalVariable.VerboseLogger("\nGenerate %s FV Successfully" % self.UiFvName) GenFdsGlobalVariable.VerboseLogger("\nGenerate %s FV Successfully" % self.UiFvName)
GenFdsGlobalVariable.SharpCounter = 0 GenFdsGlobalVariable.SharpCounter = 0
FvFileObj.seek(0) FvFileObj.seek(0)
Buffer.write(FvFileObj.read()) Buffer.write(FvFileObj.read())
# FV alignment position. # FV alignment position.
FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E]) & 0x1F) FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E:0x2F]) & 0x1F)
if FvAlignmentValue >= 0x400: if FvAlignmentValue >= 0x400:
if FvAlignmentValue >= 0x100000: if FvAlignmentValue >= 0x100000:
if FvAlignmentValue >= 0x1000000: if FvAlignmentValue >= 0x1000000:
@ -276,54 +276,54 @@ class FV (object):
# #
self.InfFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.InfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
self.UiFvName + '.inf') self.UiFvName + '.inf')
self.FvInfFile = BytesIO() self.FvInfFile = []
# #
# Add [Options] # Add [Options]
# #
self.FvInfFile.writelines("[options]" + TAB_LINE_BREAK) self.FvInfFile.append("[options]" + TAB_LINE_BREAK)
if BaseAddress is not None: if BaseAddress is not None:
self.FvInfFile.writelines("EFI_BASE_ADDRESS = " + \ self.FvInfFile.append("EFI_BASE_ADDRESS = " + \
BaseAddress + \ BaseAddress + \
TAB_LINE_BREAK) TAB_LINE_BREAK)
if BlockSize is not None: if BlockSize is not None:
self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \ self.FvInfFile.append("EFI_BLOCK_SIZE = " + \
'0x%X' %BlockSize + \ '0x%X' %BlockSize + \
TAB_LINE_BREAK) TAB_LINE_BREAK)
if BlockNum is not None: if BlockNum is not None:
self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \ self.FvInfFile.append("EFI_NUM_BLOCKS = " + \
' 0x%X' %BlockNum + \ ' 0x%X' %BlockNum + \
TAB_LINE_BREAK) TAB_LINE_BREAK)
else: else:
if self.BlockSizeList == []: if self.BlockSizeList == []:
if not self._GetBlockSize(): if not self._GetBlockSize():
#set default block size is 1 #set default block size is 1
self.FvInfFile.writelines("EFI_BLOCK_SIZE = 0x1" + TAB_LINE_BREAK) self.FvInfFile.append("EFI_BLOCK_SIZE = 0x1" + TAB_LINE_BREAK)
for BlockSize in self.BlockSizeList: for BlockSize in self.BlockSizeList:
if BlockSize[0] is not None: if BlockSize[0] is not None:
self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \ self.FvInfFile.append("EFI_BLOCK_SIZE = " + \
'0x%X' %BlockSize[0] + \ '0x%X' %BlockSize[0] + \
TAB_LINE_BREAK) TAB_LINE_BREAK)
if BlockSize[1] is not None: if BlockSize[1] is not None:
self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \ self.FvInfFile.append("EFI_NUM_BLOCKS = " + \
' 0x%X' %BlockSize[1] + \ ' 0x%X' %BlockSize[1] + \
TAB_LINE_BREAK) TAB_LINE_BREAK)
if self.BsBaseAddress is not None: if self.BsBaseAddress is not None:
self.FvInfFile.writelines('EFI_BOOT_DRIVER_BASE_ADDRESS = ' + \ self.FvInfFile.append('EFI_BOOT_DRIVER_BASE_ADDRESS = ' + \
'0x%X' %self.BsBaseAddress) '0x%X' %self.BsBaseAddress)
if self.RtBaseAddress is not None: if self.RtBaseAddress is not None:
self.FvInfFile.writelines('EFI_RUNTIME_DRIVER_BASE_ADDRESS = ' + \ self.FvInfFile.append('EFI_RUNTIME_DRIVER_BASE_ADDRESS = ' + \
'0x%X' %self.RtBaseAddress) '0x%X' %self.RtBaseAddress)
# #
# Add attribute # Add attribute
# #
self.FvInfFile.writelines("[attributes]" + TAB_LINE_BREAK) self.FvInfFile.append("[attributes]" + TAB_LINE_BREAK)
self.FvInfFile.writelines("EFI_ERASE_POLARITY = " + \ self.FvInfFile.append("EFI_ERASE_POLARITY = " + \
' %s' %ErasePloarity + \ ' %s' %ErasePloarity + \
TAB_LINE_BREAK) TAB_LINE_BREAK)
if not (self.FvAttributeDict is None): if not (self.FvAttributeDict is None):
@ -332,13 +332,13 @@ class FV (object):
if self.FvAttributeDict[FvAttribute].upper() in ('TRUE', '1'): if self.FvAttributeDict[FvAttribute].upper() in ('TRUE', '1'):
self.UsedSizeEnable = True self.UsedSizeEnable = True
continue continue
self.FvInfFile.writelines("EFI_" + \ self.FvInfFile.append("EFI_" + \
FvAttribute + \ FvAttribute + \
' = ' + \ ' = ' + \
self.FvAttributeDict[FvAttribute] + \ self.FvAttributeDict[FvAttribute] + \
TAB_LINE_BREAK ) TAB_LINE_BREAK )
if self.FvAlignment is not None: if self.FvAlignment is not None:
self.FvInfFile.writelines("EFI_FVB2_ALIGNMENT_" + \ self.FvInfFile.append("EFI_FVB2_ALIGNMENT_" + \
self.FvAlignment.strip() + \ self.FvAlignment.strip() + \
" = TRUE" + \ " = TRUE" + \
TAB_LINE_BREAK) TAB_LINE_BREAK)
@ -351,7 +351,7 @@ class FV (object):
GenFdsGlobalVariable.ErrorLogger("FV Extension Header Entries declared for %s with no FvNameGuid declaration." % (self.UiFvName)) GenFdsGlobalVariable.ErrorLogger("FV Extension Header Entries declared for %s with no FvNameGuid declaration." % (self.UiFvName))
else: else:
TotalSize = 16 + 4 TotalSize = 16 + 4
Buffer = '' Buffer = bytearray()
if self.UsedSizeEnable: if self.UsedSizeEnable:
TotalSize += (4 + 4) TotalSize += (4 + 4)
## define EFI_FV_EXT_TYPE_USED_SIZE_TYPE 0x03 ## define EFI_FV_EXT_TYPE_USED_SIZE_TYPE 0x03
@ -378,7 +378,7 @@ class FV (object):
# #
Buffer += (pack('HH', (FvUiLen + 16 + 4), 0x0002) Buffer += (pack('HH', (FvUiLen + 16 + 4), 0x0002)
+ PackGUID(Guid) + PackGUID(Guid)
+ self.UiFvName) + self.UiFvName.encode('utf-8'))
for Index in range (0, len(self.FvExtEntryType)): for Index in range (0, len(self.FvExtEntryType)):
if self.FvExtEntryType[Index] == 'FILE': if self.FvExtEntryType[Index] == 'FILE':
@ -425,11 +425,11 @@ class FV (object):
if Changed: if Changed:
if os.path.exists (self.InfFileName): if os.path.exists (self.InfFileName):
os.remove (self.InfFileName) os.remove (self.InfFileName)
self.FvInfFile.writelines("EFI_FV_EXT_HEADER_FILE_NAME = " + \ self.FvInfFile.append("EFI_FV_EXT_HEADER_FILE_NAME = " + \
FvExtHeaderFileName + \ FvExtHeaderFileName + \
TAB_LINE_BREAK) TAB_LINE_BREAK)
# #
# Add [Files] # Add [Files]
# #
self.FvInfFile.writelines("[files]" + TAB_LINE_BREAK) self.FvInfFile.append("[files]" + TAB_LINE_BREAK)
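
The generated .inf text above is now accumulated as a list of str and joined once before SaveFileOnChange, rather than written into a BytesIO, which in Python 3 would only accept bytes. The shape of that pattern, with made-up option values:

inf_lines = []
inf_lines.append("[options]\n")
inf_lines.append("EFI_BASE_ADDRESS = 0xFFF00000\n")   # illustrative value
inf_lines.append("[attributes]\n")
inf_lines.append("EFI_ERASE_POLARITY = 1\n")
inf_text = ''.join(inf_lines)
# SaveFileOnChange(InfFileName, inf_text, False) then writes it as text.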

View File

@ -102,7 +102,7 @@ class FvImageSection(FvImageSectionClassObject):
# Generate Fv # Generate Fv
# #
if self.FvName is not None: if self.FvName is not None:
Buffer = BytesIO('') Buffer = BytesIO()
Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName) Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName)
if Fv is not None: if Fv is not None:
self.Fv = Fv self.Fv = Fv

View File

@ -522,7 +522,7 @@ class GenFds(object):
return return
elif GenFds.OnlyGenerateThisFv is None: elif GenFds.OnlyGenerateThisFv is None:
for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict.values(): for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict.values():
Buffer = BytesIO('') Buffer = BytesIO()
FvObj.AddToBuffer(Buffer) FvObj.AddToBuffer(Buffer)
Buffer.close() Buffer.close()
@ -673,7 +673,7 @@ class GenFds(object):
@staticmethod @staticmethod
def GenerateGuidXRefFile(BuildDb, ArchList, FdfParserObj): def GenerateGuidXRefFile(BuildDb, ArchList, FdfParserObj):
GuidXRefFileName = os.path.join(GenFdsGlobalVariable.FvDir, "Guid.xref") GuidXRefFileName = os.path.join(GenFdsGlobalVariable.FvDir, "Guid.xref")
GuidXRefFile = BytesIO('') GuidXRefFile = []
PkgGuidDict = {} PkgGuidDict = {}
GuidDict = {} GuidDict = {}
ModuleList = [] ModuleList = []
@ -700,9 +700,9 @@ class GenFds(object):
else: else:
ModuleList.append(Module) ModuleList.append(Module)
if GlobalData.gGuidPattern.match(ModuleFile.BaseName): if GlobalData.gGuidPattern.match(ModuleFile.BaseName):
GuidXRefFile.write("%s %s\n" % (ModuleFile.BaseName, Module.BaseName)) GuidXRefFile.append("%s %s\n" % (ModuleFile.BaseName, Module.BaseName))
else: else:
GuidXRefFile.write("%s %s\n" % (Module.Guid, Module.BaseName)) GuidXRefFile.append("%s %s\n" % (Module.Guid, Module.BaseName))
GuidDict.update(Module.Protocols) GuidDict.update(Module.Protocols)
GuidDict.update(Module.Guids) GuidDict.update(Module.Guids)
GuidDict.update(Module.Ppis) GuidDict.update(Module.Ppis)
@ -715,7 +715,7 @@ class GenFds(object):
continue continue
else: else:
ModuleList.append(FdfModule) ModuleList.append(FdfModule)
GuidXRefFile.write("%s %s\n" % (FdfModule.Guid, FdfModule.BaseName)) GuidXRefFile.append("%s %s\n" % (FdfModule.Guid, FdfModule.BaseName))
GuidDict.update(FdfModule.Protocols) GuidDict.update(FdfModule.Protocols)
GuidDict.update(FdfModule.Guids) GuidDict.update(FdfModule.Guids)
GuidDict.update(FdfModule.Ppis) GuidDict.update(FdfModule.Ppis)
@ -776,19 +776,19 @@ class GenFds(object):
continue continue
Name = ' '.join(Name) if isinstance(Name, type([])) else Name Name = ' '.join(Name) if isinstance(Name, type([])) else Name
GuidXRefFile.write("%s %s\n" %(FileStatementGuid, Name)) GuidXRefFile.append("%s %s\n" %(FileStatementGuid, Name))
# Append GUIDs, Protocols, and PPIs to the Xref file # Append GUIDs, Protocols, and PPIs to the Xref file
GuidXRefFile.write("\n") GuidXRefFile.append("\n")
for key, item in GuidDict.items(): for key, item in GuidDict.items():
GuidXRefFile.write("%s %s\n" % (GuidStructureStringToGuidString(item).upper(), key)) GuidXRefFile.append("%s %s\n" % (GuidStructureStringToGuidString(item).upper(), key))
if GuidXRefFile.getvalue(): if GuidXRefFile:
SaveFileOnChange(GuidXRefFileName, GuidXRefFile.getvalue(), False) GuidXRefFile = ''.join(GuidXRefFile)
SaveFileOnChange(GuidXRefFileName, GuidXRefFile, False)
GenFdsGlobalVariable.InfLogger("\nGUID cross reference file can be found at %s" % GuidXRefFileName) GenFdsGlobalVariable.InfLogger("\nGUID cross reference file can be found at %s" % GuidXRefFileName)
elif os.path.exists(GuidXRefFileName): elif os.path.exists(GuidXRefFileName):
os.remove(GuidXRefFileName) os.remove(GuidXRefFileName)
GuidXRefFile.close()
if __name__ == '__main__': if __name__ == '__main__':
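The cross-reference content is now gathered as a list of str lines and joined once before it is handed to SaveFileOnChange with the binary flag cleared. A rough sketch of the pattern, with an invented GUID/name pair:

    GuidXRefFile = []
    GuidXRefFile.append("%s %s\n" % ("8913C5E0-33F6-4D86-9BF1-43EF89FC0666", "HelloWorld"))
    GuidXRefFile.append("\n")
    Content = ''.join(GuidXRefFile)   # one str, written as text via SaveFileOnChange(..., False)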


@ -722,8 +722,8 @@ class GenFdsGlobalVariable:
return return
if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1: if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1:
GenFdsGlobalVariable.InfLogger ("Return Value = %d" % PopenObject.returncode) GenFdsGlobalVariable.InfLogger ("Return Value = %d" % PopenObject.returncode)
GenFdsGlobalVariable.InfLogger (out) GenFdsGlobalVariable.InfLogger(out.decode(encoding='utf-8', errors='ignore'))
GenFdsGlobalVariable.InfLogger (error) GenFdsGlobalVariable.InfLogger(error.decode(encoding='utf-8', errors='ignore'))
if PopenObject.returncode != 0: if PopenObject.returncode != 0:
print("###", cmd) print("###", cmd)
EdkLogger.error("GenFds", COMMAND_FAILURE, errorMess) EdkLogger.error("GenFds", COMMAND_FAILURE, errorMess)


@ -62,8 +62,8 @@ class Region(object):
PadByte = pack('B', 0xFF) PadByte = pack('B', 0xFF)
else: else:
PadByte = pack('B', 0) PadByte = pack('B', 0)
PadData = ''.join(PadByte for i in range(0, Size)) for i in range(0, Size):
Buffer.write(PadData) Buffer.write(PadByte)
## AddToBuffer() ## AddToBuffer()
# #
@ -131,7 +131,7 @@ class Region(object):
if self.FvAddress % FvAlignValue != 0: if self.FvAddress % FvAlignValue != 0:
EdkLogger.error("GenFds", GENFDS_ERROR, EdkLogger.error("GenFds", GENFDS_ERROR,
"FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment)) "FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
FvBuffer = BytesIO('') FvBuffer = BytesIO()
FvBaseAddress = '0x%X' % self.FvAddress FvBaseAddress = '0x%X' % self.FvAddress
BlockSize = None BlockSize = None
BlockNum = None BlockNum = None
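struct.pack('B', 0xFF) already returns bytes, so the pad is written byte by byte instead of being str-joined; repeating the bytes object would work just as well. A hypothetical equivalent:

    from io import BytesIO
    from struct import pack

    Size = 16
    Buffer = BytesIO()
    PadByte = pack('B', 0xFF)        # b'\xff'
    Buffer.write(PadByte * Size)     # bytes support repetition, no ''.join() needed
    assert len(Buffer.getvalue()) == Size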


@ -122,7 +122,7 @@ if __name__ == '__main__':
if Process.returncode != 0: if Process.returncode != 0:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH') print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(Process.returncode) sys.exit(Process.returncode)
print(Version[0]) print(Version[0].decode())
# #
# Read input file into a buffer and save input filename # Read input file into a buffer and save input filename


@ -84,7 +84,7 @@ if __name__ == '__main__':
if Process.returncode != 0: if Process.returncode != 0:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH') print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(Process.returncode) sys.exit(Process.returncode)
print(Version[0]) print(Version[0].decode())
args.PemFileName = [] args.PemFileName = []
@ -119,19 +119,19 @@ if __name__ == '__main__':
args.PemFileName.append(Item.name) args.PemFileName.append(Item.name)
Item.close() Item.close()
PublicKeyHash = '' PublicKeyHash = bytearray()
for Item in args.PemFileName: for Item in args.PemFileName:
# #
# Extract public key from private key into STDOUT # Extract public key from private key into STDOUT
# #
Process = subprocess.Popen('%s rsa -in %s -modulus -noout' % (OpenSslCommand, Item), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) Process = subprocess.Popen('%s rsa -in %s -modulus -noout' % (OpenSslCommand, Item), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
PublicKeyHexString = Process.communicate()[0].split('=')[1].strip() PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()
if Process.returncode != 0: if Process.returncode != 0:
print('ERROR: Unable to extract public key from private key') print('ERROR: Unable to extract public key from private key')
sys.exit(Process.returncode) sys.exit(Process.returncode)
PublicKey = '' PublicKey = bytearray()
for Index in range (0, len(PublicKeyHexString), 2): for Index in range (0, len(PublicKeyHexString), 2):
PublicKey = PublicKey + chr(int(PublicKeyHexString[Index:Index + 2], 16)) PublicKey = PublicKey + PublicKeyHexString[Index:Index + 2]
# #
# Generate SHA 256 hash of RSA 2048 bit public key into STDOUT # Generate SHA 256 hash of RSA 2048 bit public key into STDOUT
@ -157,14 +157,14 @@ if __name__ == '__main__':
# #
PublicKeyHashC = '{' PublicKeyHashC = '{'
for Item in PublicKeyHash: for Item in PublicKeyHash:
PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (ord(Item)) PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (Item)
PublicKeyHashC = PublicKeyHashC[:-2] + '}' PublicKeyHashC = PublicKeyHashC[:-2] + '}'
# #
# Write SHA 256 of 2048 bit binary public key to public key hash C structure file # Write SHA 256 of 2048 bit binary public key to public key hash C structure file
# #
try: try:
args.PublicKeyHashCFile.write (PublicKeyHashC) args.PublicKeyHashCFile.write (bytes(PublicKeyHashC))
args.PublicKeyHashCFile.close () args.PublicKeyHashCFile.close ()
except: except:
pass pass
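Keeping the key material in a bytearray means iteration yields ints, so the C-array formatting no longer needs ord(). Illustrative sketch with an invented hash value:

    PublicKeyHash = bytearray(b'\x12\xab\xff')
    PublicKeyHashC = '{'
    for Item in PublicKeyHash:                   # Item is already an int on Python 3
        PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (Item)
    PublicKeyHashC = PublicKeyHashC[:-2] + '}'
    print(PublicKeyHashC)                        # prints: {0x12, 0xab, 0xff}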


@ -105,7 +105,7 @@ if __name__ == '__main__':
if Process.returncode != 0: if Process.returncode != 0:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH') print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(Process.returncode) sys.exit(Process.returncode)
print(Version[0]) print(Version[0].decode('utf-8'))
# #
# Read input file into a buffer and save input filename # Read input file into a buffer and save input filename
@ -153,7 +153,8 @@ if __name__ == '__main__':
# Extract public key from private key into STDOUT # Extract public key from private key into STDOUT
# #
Process = subprocess.Popen('%s rsa -in "%s" -modulus -noout' % (OpenSslCommand, args.PrivateKeyFileName), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) Process = subprocess.Popen('%s rsa -in "%s" -modulus -noout' % (OpenSslCommand, args.PrivateKeyFileName), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
PublicKeyHexString = Process.communicate()[0].split('=')[1].strip() PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()
PublicKeyHexString = PublicKeyHexString.decode('utf-8')
PublicKey = '' PublicKey = ''
while len(PublicKeyHexString) > 0: while len(PublicKeyHexString) > 0:
PublicKey = PublicKey + PublicKeyHexString[0:2] PublicKey = PublicKey + PublicKeyHexString[0:2]
@ -210,7 +211,7 @@ if __name__ == '__main__':
# #
# Verify the public key # Verify the public key
# #
if Header.PublicKey != PublicKey: if Header.PublicKey != bytearray.fromhex(PublicKey):
print('ERROR: Public key in input file does not match public key from private key file') print('ERROR: Public key in input file does not match public key from private key file')
sys.exit(1) sys.exit(1)
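Since the openssl output arrives as bytes, the modulus is split on b'=', decoded, and then compared against the key bytes from the signed image via bytearray.fromhex. A small sketch with an invented modulus:

    PipeOutput = b'Modulus=C0FFEE\n'                          # stand-in for the openssl pipe output
    PublicKeyHexString = PipeOutput.split(b'=')[1].strip().decode('utf-8')
    HeaderPublicKey = b'\xc0\xff\xee'                         # key bytes read from the input image
    if HeaderPublicKey != bytearray.fromhex(PublicKeyHexString):
        raise SystemExit('ERROR: Public key in input file does not match')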


@ -245,7 +245,7 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
# save to file # save to file
try: try:
f = open (Target, 'wb') f = open (Target, 'w')
except: except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target) EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
f.writelines(NewLines) f.writelines(NewLines)
@ -458,7 +458,7 @@ def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" %OutputFile, None) EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" %OutputFile, None)
# Use a instance of BytesIO to cache data # Use a instance of BytesIO to cache data
fStringIO = BytesIO('') fStringIO = BytesIO()
for Item in VfrUniOffsetList: for Item in VfrUniOffsetList:
if (Item[0].find("Strings") != -1): if (Item[0].find("Strings") != -1):
@ -467,9 +467,8 @@ def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
# GUID + Offset # GUID + Offset
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } } # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
# #
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66] UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid] fStringIO.write(UniGuid)
fStringIO.write(''.join(UniGuid))
UniValue = pack ('Q', int (Item[1], 16)) UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue) fStringIO.write (UniValue)
else: else:
@ -478,9 +477,8 @@ def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
# GUID + Offset # GUID + Offset
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } }; # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
# #
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2] VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid] fStringIO.write(VfrGuid)
fStringIO.write(''.join(VfrGuid))
type (Item[1]) type (Item[1])
VfrValue = pack ('Q', int (Item[1], 16)) VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue) fStringIO.write (VfrValue)
@ -562,7 +560,7 @@ def TrimEdkSourceCode(Source, Target):
CreateDirectory(os.path.dirname(Target)) CreateDirectory(os.path.dirname(Target))
try: try:
f = open (Source, 'rb') f = open (Source, 'r')
except: except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source) EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
# read whole file # read whole file
@ -581,7 +579,7 @@ def TrimEdkSourceCode(Source, Target):
return return
try: try:
f = open (Target, 'wb') f = open (Target, 'w')
except: except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target) EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
f.write(NewLines) f.write(NewLines)
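The GUIDs are now plain bytes literals written straight into the BytesIO, followed by the 64-bit offset packed with struct; no chr()/join round trip is needed. A standalone sketch (the offset value is arbitrary):

    from io import BytesIO
    from struct import pack

    fStringIO = BytesIO()
    UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'   # same 16 bytes as the old integer list
    fStringIO.write(UniGuid)
    fStringIO.write(pack('Q', int('0x120', 16)))                        # 8-byte offset, native byte order
    assert len(fStringIO.getvalue()) == 24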


@ -679,9 +679,7 @@ def GetHelpTextList(HelpTextClassList):
# @param String: the source string # @param String: the source string
# #
def StringArrayLength(String): def StringArrayLength(String):
if isinstance(String, unicode): if String.startswith('L"'):
return (len(String) + 1) * 2 + 1
elif String.startswith('L"'):
return (len(String) - 3 + 1) * 2 return (len(String) - 3 + 1) * 2
elif String.startswith('"'): elif String.startswith('"'):
return (len(String) - 2 + 1) return (len(String) - 2 + 1)


@ -94,13 +94,13 @@ class PcdClassObject(object):
deme = ArrayIndex.findall(demesionattr) deme = ArrayIndex.findall(demesionattr)
for i in range(len(deme)-1): for i in range(len(deme)-1):
if int(deme[i].lstrip("[").rstrip("]").strip()) > int(self._Capacity[i]): if int(deme[i].lstrip("[").rstrip("]").strip()) > int(self._Capacity[i]):
print "error" print ("error")
if hasattr(self,"DefaultValues"): if hasattr(self,"DefaultValues"):
for demesionattr in self.DefaultValues: for demesionattr in self.DefaultValues:
deme = ArrayIndex.findall(demesionattr) deme = ArrayIndex.findall(demesionattr)
for i in range(len(deme)-1): for i in range(len(deme)-1):
if int(deme[i].lstrip("[").rstrip("]").strip()) > int(self._Capacity[i]): if int(deme[i].lstrip("[").rstrip("]").strip()) > int(self._Capacity[i]):
print "error" print ("error")
return self._Capacity return self._Capacity
@property @property
def DatumType(self): def DatumType(self):


@ -156,7 +156,14 @@ def GetDependencyList(FileStack, SearchPathList):
continue continue
if FileContent[0] == 0xff or FileContent[0] == 0xfe: if FileContent[0] == 0xff or FileContent[0] == 0xfe:
FileContent = unicode(FileContent, "utf-16") FileContent = FileContent.decode('utf-16')
IncludedFileList = gIncludePattern.findall(FileContent)
else:
try:
FileContent = str(FileContent)
IncludedFileList = gIncludePattern.findall(FileContent)
except:
pass
IncludedFileList = gIncludePattern.findall(FileContent) IncludedFileList = gIncludePattern.findall(FileContent)
for Inc in IncludedFileList: for Inc in IncludedFileList:
@ -1615,7 +1622,7 @@ class DscBuildData(PlatformBuildClassObject):
FdfInfList = GlobalData.gFdfParser.Profile.InfList FdfInfList = GlobalData.gFdfParser.Profile.InfList
FdfModuleList = [PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch=self._Arch) for Inf in FdfInfList] FdfModuleList = [PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch=self._Arch) for Inf in FdfInfList]
AllModulePcds = set() AllModulePcds = set()
ModuleSet = set(self._Modules.keys() + FdfModuleList) ModuleSet = set(list(self._Modules.keys()) + FdfModuleList)
for ModuleFile in ModuleSet: for ModuleFile in ModuleSet:
ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain] ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
AllModulePcds = AllModulePcds | ModuleData.PcdsName AllModulePcds = AllModulePcds | ModuleData.PcdsName
@ -1743,7 +1750,7 @@ class DscBuildData(PlatformBuildClassObject):
except: except:
EdkLogger.error('Build', COMMAND_FAILURE, 'Can not execute command: %s' % Command) EdkLogger.error('Build', COMMAND_FAILURE, 'Can not execute command: %s' % Command)
Result = Process.communicate() Result = Process.communicate()
return Process.returncode, Result[0], Result[1] return Process.returncode, Result[0].decode(encoding='utf-8', errors='ignore'), Result[1].decode(encoding='utf-8', errors='ignore')
@staticmethod @staticmethod
def IntToCString(Value, ValueSize): def IntToCString(Value, ValueSize):
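Indexing a bytes object gives integers, so the BOM check compares against 0xff/0xfe directly and the file is decoded as UTF-16 before the include pattern is applied. A reduced sketch (the regex below is a simplified stand-in for the real gIncludePattern):

    import re

    gIncludePattern = re.compile(r'#include\s+["<]([^">]+)[">]')    # simplified stand-in
    FileContent = '#include "Library.h"\n'.encode('utf-16')         # starts with a UTF-16 BOM
    if FileContent[0] == 0xff or FileContent[0] == 0xfe:            # bytes indexing yields int
        Text = FileContent.decode('utf-16')
    else:
        Text = str(FileContent)
    print(gIncludePattern.findall(Text))                            # ['Library.h']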


@ -1999,10 +1999,10 @@ class DecParser(MetaFileParser):
return return
if self._include_flag: if self._include_flag:
self._ValueList[1] = "<HeaderFiles>_" + md5(self._CurrentLine).hexdigest() self._ValueList[1] = "<HeaderFiles>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()
self._ValueList[2] = self._CurrentLine self._ValueList[2] = self._CurrentLine
if self._package_flag and "}" != self._CurrentLine: if self._package_flag and "}" != self._CurrentLine:
self._ValueList[1] = "<Packages>_" + md5(self._CurrentLine).hexdigest() self._ValueList[1] = "<Packages>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()
self._ValueList[2] = self._CurrentLine self._ValueList[2] = self._CurrentLine
if self._CurrentLine == "}": if self._CurrentLine == "}":
self._package_flag = False self._package_flag = False
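hashlib's md5() only hashes bytes on Python 3, hence the explicit encode of the current line before the digest is taken. Minimal example:

    from hashlib import md5

    CurrentLine = '{0x0, 0x1, 0x2}'
    Key = "<Packages>_" + md5(CurrentLine.encode('utf-8')).hexdigest()   # md5(str) would raise TypeError
    print(Key)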


@ -143,7 +143,7 @@ VPDPcdList = []
def FileWrite(File, String, Wrapper=False): def FileWrite(File, String, Wrapper=False):
if Wrapper: if Wrapper:
String = textwrap.fill(String, 120) String = textwrap.fill(String, 120)
File.write(String + gEndOfLine) File.append(String + gEndOfLine)
def ByteArrayForamt(Value): def ByteArrayForamt(Value):
IsByteArray = False IsByteArray = False
@ -636,7 +636,7 @@ class ModuleReport(object):
Match = gTimeStampPattern.search(FileContents) Match = gTimeStampPattern.search(FileContents)
if Match: if Match:
self.BuildTimeStamp = datetime.fromtimestamp(int(Match.group(1))) self.BuildTimeStamp = datetime.utcfromtimestamp(int(Match.group(1)))
except IOError: except IOError:
EdkLogger.warn(None, "Fail to read report file", FwReportFileName) EdkLogger.warn(None, "Fail to read report file", FwReportFileName)
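datetime.utcfromtimestamp makes the recorded build time stamp independent of the build machine's local time zone; for example:

    from datetime import datetime

    Stamp = 1548201600                              # arbitrary epoch value
    print(datetime.utcfromtimestamp(Stamp))         # 2019-01-23 00:00:00 everywhere
    print(datetime.fromtimestamp(Stamp))            # shifts with the local time zone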
@ -721,8 +721,8 @@ def ReadMessage(From, To, ExitFlag):
# read one line a time # read one line a time
Line = From.readline() Line = From.readline()
# empty string means "end" # empty string means "end"
if Line is not None and Line != "": if Line is not None and Line != b"":
To(Line.rstrip()) To(Line.rstrip().decode(encoding='utf-8', errors='ignore'))
else: else:
break break
if ExitFlag.isSet(): if ExitFlag.isSet():
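The reader thread compares readline() against b"" and decodes each captured line, because the pipe set up by subprocess yields bytes. A reduced, self-contained version of that loop (To stands in for the logger callback):

    import subprocess

    Process = subprocess.Popen('echo building && echo done', shell=True, stdout=subprocess.PIPE)
    To = print                                       # stand-in for the EdkLogger callback
    while True:
        Line = Process.stdout.readline()             # bytes on Python 3
        if Line is not None and Line != b"":
            To(Line.rstrip().decode(encoding='utf-8', errors='ignore'))
        else:
            break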
@ -2269,18 +2269,17 @@ class BuildReport(object):
def GenerateReport(self, BuildDuration, AutoGenTime, MakeTime, GenFdsTime): def GenerateReport(self, BuildDuration, AutoGenTime, MakeTime, GenFdsTime):
if self.ReportFile: if self.ReportFile:
try: try:
File = BytesIO('') File = []
for (Wa, MaList) in self.ReportList: for (Wa, MaList) in self.ReportList:
PlatformReport(Wa, MaList, self.ReportType).GenerateReport(File, BuildDuration, AutoGenTime, MakeTime, GenFdsTime, self.ReportType) PlatformReport(Wa, MaList, self.ReportType).GenerateReport(File, BuildDuration, AutoGenTime, MakeTime, GenFdsTime, self.ReportType)
Content = FileLinesSplit(File.getvalue(), gLineMaxLength) Content = FileLinesSplit(''.join(File), gLineMaxLength)
SaveFileOnChange(self.ReportFile, Content, True) SaveFileOnChange(self.ReportFile, Content, False)
EdkLogger.quiet("Build report can be found at %s" % os.path.abspath(self.ReportFile)) EdkLogger.quiet("Build report can be found at %s" % os.path.abspath(self.ReportFile))
except IOError: except IOError:
EdkLogger.error(None, FILE_WRITE_FAILURE, ExtraData=self.ReportFile) EdkLogger.error(None, FILE_WRITE_FAILURE, ExtraData=self.ReportFile)
except: except:
EdkLogger.error("BuildReport", CODE_ERROR, "Unknown fatal error when generating build report", ExtraData=self.ReportFile, RaiseError=False) EdkLogger.error("BuildReport", CODE_ERROR, "Unknown fatal error when generating build report", ExtraData=self.ReportFile, RaiseError=False)
EdkLogger.quiet("(Python %s on %s\n%s)" % (platform.python_version(), sys.platform, traceback.format_exc())) EdkLogger.quiet("(Python %s on %s\n%s)" % (platform.python_version(), sys.platform, traceback.format_exc()))
File.close()
# This acts like the main() function for the script, unless it is 'import'ed into another script. # This acts like the main() function for the script, unless it is 'import'ed into another script.
if __name__ == '__main__': if __name__ == '__main__':


@ -20,7 +20,6 @@
from __future__ import print_function from __future__ import print_function
import Common.LongFilePathOs as os import Common.LongFilePathOs as os
import re import re
from io import BytesIO
import sys import sys
import glob import glob
import time import time
@ -182,8 +181,8 @@ def ReadMessage(From, To, ExitFlag):
# read one line a time # read one line a time
Line = From.readline() Line = From.readline()
# empty string means "end" # empty string means "end"
if Line is not None and Line != "": if Line is not None and Line != b"":
To(Line.rstrip()) To(Line.rstrip().decode(encoding='utf-8', errors='ignore'))
else: else:
break break
if ExitFlag.isSet(): if ExitFlag.isSet():
@ -1410,11 +1409,11 @@ class Build():
# Add general information. # Add general information.
# #
if ModeIsSmm: if ModeIsSmm:
MapBuffer.write('\n\n%s (Fixed SMRAM Offset, BaseAddress=0x%010X, EntryPoint=0x%010X)\n' % (ModuleName, BaseAddress, BaseAddress + ModuleInfo.Image.EntryPoint)) MapBuffer.append('\n\n%s (Fixed SMRAM Offset, BaseAddress=0x%010X, EntryPoint=0x%010X)\n' % (ModuleName, BaseAddress, BaseAddress + ModuleInfo.Image.EntryPoint))
elif AddrIsOffset: elif AddrIsOffset:
MapBuffer.write('\n\n%s (Fixed Memory Offset, BaseAddress=-0x%010X, EntryPoint=-0x%010X)\n' % (ModuleName, 0 - BaseAddress, 0 - (BaseAddress + ModuleInfo.Image.EntryPoint))) MapBuffer.append('\n\n%s (Fixed Memory Offset, BaseAddress=-0x%010X, EntryPoint=-0x%010X)\n' % (ModuleName, 0 - BaseAddress, 0 - (BaseAddress + ModuleInfo.Image.EntryPoint)))
else: else:
MapBuffer.write('\n\n%s (Fixed Memory Address, BaseAddress=0x%010X, EntryPoint=0x%010X)\n' % (ModuleName, BaseAddress, BaseAddress + ModuleInfo.Image.EntryPoint)) MapBuffer.append('\n\n%s (Fixed Memory Address, BaseAddress=0x%010X, EntryPoint=0x%010X)\n' % (ModuleName, BaseAddress, BaseAddress + ModuleInfo.Image.EntryPoint))
# #
# Add guid and general seciton section. # Add guid and general seciton section.
# #
@ -1426,21 +1425,21 @@ class Build():
elif SectionHeader[0] in ['.data', '.sdata']: elif SectionHeader[0] in ['.data', '.sdata']:
DataSectionAddress = SectionHeader[1] DataSectionAddress = SectionHeader[1]
if AddrIsOffset: if AddrIsOffset:
MapBuffer.write('(GUID=%s, .textbaseaddress=-0x%010X, .databaseaddress=-0x%010X)\n' % (ModuleInfo.Guid, 0 - (BaseAddress + TextSectionAddress), 0 - (BaseAddress + DataSectionAddress))) MapBuffer.append('(GUID=%s, .textbaseaddress=-0x%010X, .databaseaddress=-0x%010X)\n' % (ModuleInfo.Guid, 0 - (BaseAddress + TextSectionAddress), 0 - (BaseAddress + DataSectionAddress)))
else: else:
MapBuffer.write('(GUID=%s, .textbaseaddress=0x%010X, .databaseaddress=0x%010X)\n' % (ModuleInfo.Guid, BaseAddress + TextSectionAddress, BaseAddress + DataSectionAddress)) MapBuffer.append('(GUID=%s, .textbaseaddress=0x%010X, .databaseaddress=0x%010X)\n' % (ModuleInfo.Guid, BaseAddress + TextSectionAddress, BaseAddress + DataSectionAddress))
# #
# Add debug image full path. # Add debug image full path.
# #
MapBuffer.write('(IMAGE=%s)\n\n' % (ModuleDebugImage)) MapBuffer.append('(IMAGE=%s)\n\n' % (ModuleDebugImage))
# #
# Add funtion address # Add funtion address
# #
for Function in FunctionList: for Function in FunctionList:
if AddrIsOffset: if AddrIsOffset:
MapBuffer.write(' -0x%010X %s\n' % (0 - (BaseAddress + Function[1]), Function[0])) MapBuffer.append(' -0x%010X %s\n' % (0 - (BaseAddress + Function[1]), Function[0]))
else: else:
MapBuffer.write(' 0x%010X %s\n' % (BaseAddress + Function[1], Function[0])) MapBuffer.append(' 0x%010X %s\n' % (BaseAddress + Function[1], Function[0]))
ImageMap.close() ImageMap.close()
# #
@ -1475,7 +1474,7 @@ class Build():
GuidString = MatchGuid.group() GuidString = MatchGuid.group()
if GuidString.upper() in ModuleList: if GuidString.upper() in ModuleList:
Line = Line.replace(GuidString, ModuleList[GuidString.upper()].Name) Line = Line.replace(GuidString, ModuleList[GuidString.upper()].Name)
MapBuffer.write(Line) MapBuffer.append(Line)
# #
# Add the debug image full path. # Add the debug image full path.
# #
@ -1483,7 +1482,7 @@ class Build():
if MatchGuid is not None: if MatchGuid is not None:
GuidString = MatchGuid.group().split("=")[1] GuidString = MatchGuid.group().split("=")[1]
if GuidString.upper() in ModuleList: if GuidString.upper() in ModuleList:
MapBuffer.write('(IMAGE=%s)\n' % (os.path.join(ModuleList[GuidString.upper()].DebugDir, ModuleList[GuidString.upper()].Name + '.efi'))) MapBuffer.append('(IMAGE=%s)\n' % (os.path.join(ModuleList[GuidString.upper()].DebugDir, ModuleList[GuidString.upper()].Name + '.efi')))
FvMap.close() FvMap.close()
@ -1599,11 +1598,11 @@ class Build():
if ReturnValue != 0: if ReturnValue != 0:
EdkLogger.error("build", PARAMETER_INVALID, "Patch PCD value failed", ExtraData=ErrorInfo) EdkLogger.error("build", PARAMETER_INVALID, "Patch PCD value failed", ExtraData=ErrorInfo)
MapBuffer.write('PEI_CODE_PAGE_NUMBER = 0x%x\n' % (PeiSize // 0x1000)) MapBuffer.append('PEI_CODE_PAGE_NUMBER = 0x%x\n' % (PeiSize // 0x1000))
MapBuffer.write('BOOT_CODE_PAGE_NUMBER = 0x%x\n' % (BtSize // 0x1000)) MapBuffer.append('BOOT_CODE_PAGE_NUMBER = 0x%x\n' % (BtSize // 0x1000))
MapBuffer.write('RUNTIME_CODE_PAGE_NUMBER = 0x%x\n' % (RtSize // 0x1000)) MapBuffer.append('RUNTIME_CODE_PAGE_NUMBER = 0x%x\n' % (RtSize // 0x1000))
if len (SmmModuleList) > 0: if len (SmmModuleList) > 0:
MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize // 0x1000)) MapBuffer.append('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize // 0x1000))
PeiBaseAddr = TopMemoryAddress - RtSize - BtSize PeiBaseAddr = TopMemoryAddress - RtSize - BtSize
BtBaseAddr = TopMemoryAddress - RtSize BtBaseAddr = TopMemoryAddress - RtSize
@ -1613,7 +1612,7 @@ class Build():
self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0) self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0)
self._RebaseModule (MapBuffer, RtBaseAddr, RtModuleList, TopMemoryAddress == 0) self._RebaseModule (MapBuffer, RtBaseAddr, RtModuleList, TopMemoryAddress == 0)
self._RebaseModule (MapBuffer, 0x1000, SmmModuleList, AddrIsOffset=False, ModeIsSmm=True) self._RebaseModule (MapBuffer, 0x1000, SmmModuleList, AddrIsOffset=False, ModeIsSmm=True)
MapBuffer.write('\n\n') MapBuffer.append('\n\n')
sys.stdout.write ("\n") sys.stdout.write ("\n")
sys.stdout.flush() sys.stdout.flush()
@ -1627,8 +1626,7 @@ class Build():
# #
# Save address map into MAP file. # Save address map into MAP file.
# #
SaveFileOnChange(MapFilePath, MapBuffer.getvalue(), False) SaveFileOnChange(MapFilePath, ''.join(MapBuffer), False)
MapBuffer.close()
if self.LoadFixAddress != 0: if self.LoadFixAddress != 0:
sys.stdout.write ("\nLoad Module At Fix Address Map file can be found at %s\n" % (MapFilePath)) sys.stdout.write ("\nLoad Module At Fix Address Map file can be found at %s\n" % (MapFilePath))
sys.stdout.flush() sys.stdout.flush()
@ -1703,7 +1701,7 @@ class Build():
if not Ma.IsLibrary: if not Ma.IsLibrary:
ModuleList[Ma.Guid.upper()] = Ma ModuleList[Ma.Guid.upper()] = Ma
MapBuffer = BytesIO('') MapBuffer = []
if self.LoadFixAddress != 0: if self.LoadFixAddress != 0:
# #
# Rebase module to the preferred memory address before GenFds # Rebase module to the preferred memory address before GenFds
@ -1861,7 +1859,7 @@ class Build():
if not Ma.IsLibrary: if not Ma.IsLibrary:
ModuleList[Ma.Guid.upper()] = Ma ModuleList[Ma.Guid.upper()] = Ma
MapBuffer = BytesIO('') MapBuffer = []
if self.LoadFixAddress != 0: if self.LoadFixAddress != 0:
# #
# Rebase module to the preferred memory address before GenFds # Rebase module to the preferred memory address before GenFds
@ -2042,7 +2040,7 @@ class Build():
# #
# Rebase module to the preferred memory address before GenFds # Rebase module to the preferred memory address before GenFds
# #
MapBuffer = BytesIO('') MapBuffer = []
if self.LoadFixAddress != 0: if self.LoadFixAddress != 0:
self._CollectModuleMapBuffer(MapBuffer, ModuleList) self._CollectModuleMapBuffer(MapBuffer, ModuleList)