Revert BaseTools: PYTHON3 migration

This reverts commits 6693f359b3c213513c5096a06c6f67244a44dc52..678f851312.

The Python3 migration is a fundamental change: it requires every developer
to install Python3. Before such a migration, thorough communication and wide
verification must take place, but most people are not yet aware of the change
and have not tried it. The Python3 migration is therefore reverted and moved
to the edk2-staging Python3 branch for evaluation by edk2 users.

Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Liming Gao <liming.gao@intel.com>
Author: Liming Gao
Date:   2018-10-15 08:27:53 +08:00
Parent: 678f851312
Commit: 1ccc4d895d
182 changed files with 48049 additions and 15099 deletions


@ -15,6 +15,8 @@
## Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import Common.LongFilePathOs as os
import re
import os.path as path
@ -198,11 +200,11 @@ class AutoGen(object):
# if it exists, just return it directly
return cls.__ObjectCache[Key]
# it didnt exist. create it, cache it, then return it
RetVal = cls.__ObjectCache[Key] = super().__new__(cls)
RetVal = cls.__ObjectCache[Key] = super(AutoGen, cls).__new__(cls)
return RetVal
def __init__ (self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
super().__init__()
super(AutoGen, self).__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
## hash() operator
#
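These hunks swap Python3's zero-argument super() back to the explicit two-argument form, which works on both interpreters. A minimal sketch of the difference (class names are illustrative, not from BaseTools):

class Base(object):
    def __init__(self):
        self.ready = True

class Child(Base):
    def __init__(self):
        # Python3-only spelling; the compiler supplies class and instance:
        #   super().__init__()
        # Python2-and-3 spelling, as restored by this revert:
        super(Child, self).__init__()

print(Child().ready)  # True on either interpreter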
@ -235,7 +237,7 @@ class WorkspaceAutoGen(AutoGen):
# call super().__init__ then call the worker function with different parameter count
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
if not hasattr(self, "_Init"):
super().__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
super(WorkspaceAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
self._Init = True
@ -295,7 +297,7 @@ class WorkspaceAutoGen(AutoGen):
SkippedArchList = set(self.ArchList).symmetric_difference(set(self.Platform.SupArchList))
EdkLogger.verbose("\nArch [%s] is ignored because the platform supports [%s] only!"
% (" ".join(SkippedArchList), " ".join(self.Platform.SupArchList)))
self.ArchList = tuple(sorted(ArchList))
self.ArchList = tuple(ArchList)
# Validate build target
if self.BuildTarget not in self.Platform.BuildTargets:
@ -518,7 +520,7 @@ class WorkspaceAutoGen(AutoGen):
for BuildData in PGen.BuildDatabase._CACHE_.values():
if BuildData.Arch != Arch:
continue
for key in list(BuildData.Pcds.keys()):
for key in BuildData.Pcds:
for SinglePcd in GlobalData.MixedPcd:
if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) == SinglePcd:
for item in GlobalData.MixedPcd[SinglePcd]:
@ -616,17 +618,17 @@ class WorkspaceAutoGen(AutoGen):
#
content = 'gCommandLineDefines: '
content += str(GlobalData.gCommandLineDefines)
content += "\n"
content += os.linesep
content += 'BuildOptionPcd: '
content += str(GlobalData.BuildOptionPcd)
content += "\n"
content += os.linesep
content += 'Active Platform: '
content += str(self.Platform)
content += "\n"
content += os.linesep
if self.FdfFile:
content += 'Flash Image Definition: '
content += str(self.FdfFile)
content += "\n"
content += os.linesep
SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)
#
@ -636,7 +638,7 @@ class WorkspaceAutoGen(AutoGen):
if Pa.PcdTokenNumber:
if Pa.DynamicPcdList:
for Pcd in Pa.DynamicPcdList:
PcdTokenNumber += "\n"
PcdTokenNumber += os.linesep
PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))
PcdTokenNumber += ' : '
PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName])
@ -661,7 +663,7 @@ class WorkspaceAutoGen(AutoGen):
for files in AllWorkSpaceMetaFiles:
if files.endswith('.dec'):
continue
f = open(files, 'rb')
f = open(files, 'r')
Content = f.read()
f.close()
m.update(Content)
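Reading metafiles with 'rb' was needed on Python3, where hashlib's update() only accepts bytes; on Python2 a file opened with 'r' already yields byte strings, which is what the revert restores. A minimal sketch of the version-safe pattern (helper name illustrative):

import hashlib

def hash_file(path):
    m = hashlib.md5()
    # Binary mode yields bytes on both interpreters; text mode on
    # Python3 would yield str, which m.update() rejects.
    with open(path, 'rb') as f:
        m.update(f.read())
    return m.hexdigest()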
@ -677,7 +679,7 @@ class WorkspaceAutoGen(AutoGen):
if not os.path.exists(self.BuildDir):
os.makedirs(self.BuildDir)
with open(os.path.join(self.BuildDir, 'AutoGen'), 'w+') as file:
for f in sorted(AllWorkSpaceMetaFiles):
for f in AllWorkSpaceMetaFiles:
print(f, file=file)
return True
@ -690,7 +692,7 @@ class WorkspaceAutoGen(AutoGen):
HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')
m = hashlib.md5()
# Get .dec file's hash value
f = open(Pkg.MetaFile.Path, 'rb')
f = open(Pkg.MetaFile.Path, 'r')
Content = f.read()
f.close()
m.update(Content)
@ -700,7 +702,7 @@ class WorkspaceAutoGen(AutoGen):
for Root, Dirs, Files in os.walk(str(inc)):
for File in sorted(Files):
File_Path = os.path.join(Root, File)
f = open(File_Path, 'rb')
f = open(File_Path, 'r')
Content = f.read()
f.close()
m.update(Content)
@ -864,7 +866,7 @@ class WorkspaceAutoGen(AutoGen):
def _CheckAllPcdsTokenValueConflict(self):
for Pa in self.AutoGenObjectList:
for Package in Pa.PackageList:
PcdList = list(Package.Pcds.values())
PcdList = Package.Pcds.values()
PcdList.sort(key=lambda x: int(x.TokenValue, 0))
Count = 0
while (Count < len(PcdList) - 1) :
@ -910,7 +912,7 @@ class WorkspaceAutoGen(AutoGen):
Count += SameTokenValuePcdListCount
Count += 1
PcdList = list(Package.Pcds.values())
PcdList = Package.Pcds.values()
PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
Count = 0
while (Count < len(PcdList) - 1) :
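On Python2, dict.keys() and dict.values() return lists; on Python3 they return view objects with no .sort() or indexing, which is why the migration wrapped them in list(). For illustration:

pcds = {'PcdB': 2, 'PcdA': 1}
values = list(pcds.values())  # required on Python3; harmless on Python2
values.sort()                 # a Python3 view object has no .sort()
print(values)                 # [1, 2]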
@ -973,7 +975,7 @@ class PlatformAutoGen(AutoGen):
# call super().__init__ then call the worker function with different parameter count
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
if not hasattr(self, "_Init"):
super().__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
super(PlatformAutoGen, self).__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
self._Init = True
#
@ -1179,7 +1181,7 @@ class PlatformAutoGen(AutoGen):
if os.path.exists(VpdMapFilePath):
OrgVpdFile.Read(VpdMapFilePath)
PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])
NvStoreOffset = list(PcdItems.values())[0].strip() if PcdItems else '0'
NvStoreOffset = PcdItems.values()[0].strip() if PcdItems else '0'
else:
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
@ -1229,7 +1231,7 @@ class PlatformAutoGen(AutoGen):
FdfModuleList.append(os.path.normpath(InfName))
for M in self._MaList:
# F is the Module for which M is the module autogen
for PcdFromModule in list(M.ModulePcdList) + list(M.LibraryPcdList):
for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:
# make sure that the "VOID*" kind of datum has MaxDatumSize set
if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:
NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, M.MetaFile))
@ -1378,7 +1380,7 @@ class PlatformAutoGen(AutoGen):
if (self.Workspace.ArchList[-1] == self.Arch):
for Pcd in self._DynamicPcdList:
# just pick the a value to determine whether is unicode string type
Sku = list(Pcd.SkuInfoList.values())[0]
Sku = Pcd.SkuInfoList.values()[0]
Sku.VpdOffset = Sku.VpdOffset.strip()
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
@ -1477,7 +1479,7 @@ class PlatformAutoGen(AutoGen):
if not FoundFlag :
# just pick the a value to determine whether is unicode string type
SkuValueMap = {}
SkuObjList = list(DscPcdEntry.SkuInfoList.items())
SkuObjList = DscPcdEntry.SkuInfoList.items()
DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)
if DefaultSku:
defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))
@ -1503,7 +1505,7 @@ class PlatformAutoGen(AutoGen):
DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]
# Only fix the value while no value provided in DSC file.
if not Sku.DefaultValue:
DscPcdEntry.SkuInfoList[list(DscPcdEntry.SkuInfoList.keys())[0]].DefaultValue = DecPcdEntry.DefaultValue
DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue
if DscPcdEntry not in self._DynamicPcdList:
self._DynamicPcdList.append(DscPcdEntry)
@ -1579,7 +1581,7 @@ class PlatformAutoGen(AutoGen):
# Delete the DynamicPcdList At the last time enter into this function
for Pcd in self._DynamicPcdList:
# just pick the a value to determine whether is unicode string type
Sku = list(Pcd.SkuInfoList.values())[0]
Sku = Pcd.SkuInfoList.values()[0]
Sku.VpdOffset = Sku.VpdOffset.strip()
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
@ -1598,14 +1600,11 @@ class PlatformAutoGen(AutoGen):
self._DynamicPcdList.extend(list(UnicodePcdArray))
self._DynamicPcdList.extend(list(HiiPcdArray))
self._DynamicPcdList.extend(list(OtherPcdArray))
#python3.6 set is not ordered at all
self._DynamicPcdList = sorted(self._DynamicPcdList, key=lambda x:(x.TokenSpaceGuidCName, x.TokenCName))
self._NonDynamicPcdList = sorted(self._NonDynamicPcdList, key=lambda x: (x.TokenSpaceGuidCName, x.TokenCName))
allskuset = [(SkuName, Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName, Sku) in pcd.SkuInfoList.items()]
for pcd in self._DynamicPcdList:
if len(pcd.SkuInfoList) == 1:
for (SkuName, SkuId) in allskuset:
if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:
if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0:
continue
pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])
pcd.SkuInfoList[SkuName].SkuId = SkuId
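Two reverted idioms meet in this hunk: the migration added sorted() calls because set and dict iteration order is not a contract the build can rely on ("python3.6 set is not ordered at all"), and it replaced type(SkuId) in (str, unicode) because the unicode type no longer exists on Python3. A hedged sketch of a version-agnostic string check (names illustrative):

import sys

if sys.version_info[0] >= 3:
    string_types = (str,)           # Python3: str is the only text type
else:
    string_types = (str, unicode)   # Python2 only; never evaluated on Python3

def is_string(value):
    return isinstance(value, string_types)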
@ -2147,7 +2146,7 @@ class PlatformAutoGen(AutoGen):
Pcd.MaxDatumSize = str(len(Value.split(',')))
else:
Pcd.MaxDatumSize = str(len(Value) - 1)
return list(Pcds.values())
return Pcds.values()
## Resolve library names to library modules
#
@ -2251,7 +2250,7 @@ class PlatformAutoGen(AutoGen):
# Use the highest priority value.
#
if (len(OverrideList) >= 2):
KeyList = list(OverrideList.keys())
KeyList = OverrideList.keys()
for Index in range(len(KeyList)):
NowKey = KeyList[Index]
Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
@ -2373,11 +2372,11 @@ class PlatformAutoGen(AutoGen):
if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
BuildRuleOrder = Options[Tool][Attr]
AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
list(self.ToolDefinition.keys()))
AllTools = set(ModuleOptions.keys() + PlatformOptions.keys() +
PlatformModuleOptions.keys() + ModuleTypeOptions.keys() +
self.ToolDefinition.keys())
BuildOptions = defaultdict(lambda: defaultdict(str))
for Tool in sorted(AllTools):
for Tool in AllTools:
for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
if Tool not in Options:
continue
@ -2428,7 +2427,7 @@ class ModuleAutoGen(AutoGen):
# call super().__init__ then call the worker function with different parameter count
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
if not hasattr(self, "_Init"):
super().__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
super(ModuleAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
self._Init = True
@ -2442,7 +2441,7 @@ class ModuleAutoGen(AutoGen):
EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
% (MetaFile, Arch))
return None
return super().__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
## Initialize ModuleAutoGen
#
@ -3159,12 +3158,12 @@ class ModuleAutoGen(AutoGen):
@cached_property
def IntroTargetList(self):
self.Targets
return sorted(self._IntroBuildTargetList, key=lambda x: str(x.Target))
return self._IntroBuildTargetList
@cached_property
def CodaTargetList(self):
self.Targets
return sorted(self._FinalBuildTargetList, key=lambda x: str(x.Target))
return self._FinalBuildTargetList
@cached_property
def FileTypes(self):
@ -3210,7 +3209,7 @@ class ModuleAutoGen(AutoGen):
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
RetVal[AutoFile] = str(StringH)
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
RetVal[AutoFile] = UniStringBinBuffer.getvalue()
AutoFile.IsBinary = True
@ -3221,7 +3220,7 @@ class ModuleAutoGen(AutoGen):
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
RetVal[AutoFile] = str(StringIdf)
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
AutoFile.IsBinary = True
@ -3439,7 +3438,7 @@ class ModuleAutoGen(AutoGen):
return None
MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrUniBaseName.values())
if not VfrUniOffsetList:
return None
@ -3452,7 +3451,7 @@ class ModuleAutoGen(AutoGen):
EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
# Use a instance of BytesIO to cache data
fStringIO = BytesIO()
fStringIO = BytesIO('')
for Item in VfrUniOffsetList:
if (Item[0].find("Strings") != -1):
@ -3462,7 +3461,8 @@ class ModuleAutoGen(AutoGen):
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
#
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
fStringIO.write(bytes(UniGuid))
UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
fStringIO.write(''.join(UniGuid))
UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue)
else:
@ -3472,7 +3472,8 @@ class ModuleAutoGen(AutoGen):
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
#
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
fStringIO.write(bytes(VfrGuid))
VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
fStringIO.write(''.join(VfrGuid))
VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue)
#
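On Python3 a BytesIO only accepts bytes, so the migration wrote bytes(UniGuid) and bytes(VfrGuid) directly; the restored Python2 code joins chr() results, which are one-byte strings there. A sketch using the GUID bytes from the hunk above; bytes(bytearray(...)) is the spelling that satisfies both interpreters:

from io import BytesIO

uni_guid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d,
            0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x06, 0x66]
buf = BytesIO()
buf.write(bytes(bytearray(uni_guid)))  # 16 raw GUID bytes
assert len(buf.getvalue()) == 16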
@ -3524,7 +3525,7 @@ class ModuleAutoGen(AutoGen):
Packages = []
PcdCheckList = []
PcdTokenSpaceList = []
for Pcd in list(self.ModulePcdList) + list(self.LibraryPcdList):
for Pcd in self.ModulePcdList + self.LibraryPcdList:
if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
PatchablePcds.append(Pcd)
PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
@ -3636,10 +3637,6 @@ class ModuleAutoGen(AutoGen):
AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
else:
AsBuiltInfDict['binary_item'].append('BIN|' + File)
if not self.DepexGenerated:
DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
if os.path.exists(DepexFile):
self.DepexGenerated = True
if self.DepexGenerated:
self.OutputFile.add(self.Name + '.depex')
if self.ModuleType in [SUP_MODULE_PEIM]:
@ -3736,7 +3733,7 @@ class ModuleAutoGen(AutoGen):
Padding = '0x00, '
if Unicode:
Padding = Padding * 2
ArraySize = ArraySize // 2
ArraySize = ArraySize / 2
if ArraySize < (len(PcdValue) + 1):
if Pcd.MaxSizeUserSet:
EdkLogger.error("build", AUTOGEN_ERROR,
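PEP 238 made / true division on Python3, so every size computation that must stay integral was rewritten with //; the restored / still floors on Python2. For example:

array_size = 7
print(array_size // 2)  # 3 on both interpreters (floor division)
print(array_size / 2)   # 3 on Python2, 3.5 on Python3 (true division)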
@ -3896,7 +3893,7 @@ class ModuleAutoGen(AutoGen):
if os.path.exists (self.TimeStampPath):
os.remove (self.TimeStampPath)
with open(self.TimeStampPath, 'w+') as file:
for f in sorted(FileSet):
for f in FileSet:
print(f, file=file)
# Ignore generating makefile when it is a binary module
@ -4024,29 +4021,29 @@ class ModuleAutoGen(AutoGen):
GlobalData.gModuleHash[self.Arch] = {}
m = hashlib.md5()
# Add Platform level hash
m.update(GlobalData.gPlatformHash.encode('utf-8'))
m.update(GlobalData.gPlatformHash)
# Add Package level hash
if self.DependentPackageList:
for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:
m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName].encode('utf-8'))
m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName])
# Add Library hash
if self.LibraryAutoGenList:
for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
Lib.GenModuleHash()
m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
m.update(GlobalData.gModuleHash[self.Arch][Lib.Name])
# Add Module self
f = open(str(self.MetaFile), 'rb')
f = open(str(self.MetaFile), 'r')
Content = f.read()
f.close()
m.update(Content)
# Add Module's source files
if self.SourceFileList:
for File in sorted(self.SourceFileList, key=lambda x: str(x)):
f = open(str(File), 'rb')
f = open(str(File), 'r')
Content = f.read()
f.close()
m.update(Content)


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import print_function
import Common.LongFilePathOs as os
import re
import copy


@ -13,6 +13,7 @@
## Import Modules
#
from __future__ import absolute_import
import string
import collections
import struct
@ -936,7 +937,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
if Info.IsLibrary:
PcdList = Info.LibraryPcdList
else:
PcdList = list(Info.ModulePcdList) + list(Info.LibraryPcdList)
PcdList = Info.ModulePcdList + Info.LibraryPcdList
PcdExCNameTest = 0
for PcdModule in PcdList:
if PcdModule.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
@ -970,7 +971,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
AutoGenH.Append('#define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
elif Pcd.Type in PCD_DYNAMIC_TYPE_SET:
PcdCNameTest = 0
for PcdModule in list(Info.LibraryPcdList) + list(Info.ModulePcdList):
for PcdModule in Info.LibraryPcdList + Info.ModulePcdList:
if PcdModule.Type in PCD_DYNAMIC_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
PcdCNameTest += 1
# get out early once we found > 1...
@ -1011,10 +1012,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
Value = Value[:-1]
ValueNumber = int (Value, 0)
except:
try:
ValueNumber = int(Value.lstrip('0'))
except:
EdkLogger.error("build", AUTOGEN_ERROR,
EdkLogger.error("build", AUTOGEN_ERROR,
"PCD value is not valid dec or hex number for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
ExtraData="[%s]" % str(Info))
if ValueNumber < 0:
@ -1053,7 +1051,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
else:
NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ', '
if Unicode:
ArraySize = ArraySize // 2
ArraySize = ArraySize / 2
Value = NewValue + '0 }'
if ArraySize < ValueSize:
if Pcd.MaxSizeUserSet:
@ -1063,7 +1061,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
else:
ArraySize = Pcd.GetPcdSize()
if Unicode:
ArraySize = ArraySize // 2
ArraySize = ArraySize / 2
Array = '[%d]' % ArraySize
#
# skip casting for fixed at build since it breaks ARM assembly.
@ -1798,7 +1796,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
TempBuffer += Buffer
elif File.Ext.upper() == '.JPG':
ImageType, = struct.unpack('4s', Buffer[6:10])
if ImageType != b'JFIF':
if ImageType != 'JFIF':
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)
TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)
TempBuffer += pack('I', len(Buffer))
@ -1898,7 +1896,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
ImageType, = struct.unpack('2s', Buffer[0:2])
if ImageType!= b'BM': # BMP file type is 'BM'
if ImageType!= 'BM': # BMP file type is 'BM'
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)
BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])
BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')
@ -1922,7 +1920,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
else:
ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_1BIT)
ImageBuffer += pack('B', PaletteIndex)
Width = (BmpHeader.biWidth + 7)//8
Width = (BmpHeader.biWidth + 7)/8
if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
elif BmpHeader.biBitCount == 4:
@ -1931,7 +1929,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
else:
ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_4BIT)
ImageBuffer += pack('B', PaletteIndex)
Width = (BmpHeader.biWidth + 1)//2
Width = (BmpHeader.biWidth + 1)/2
if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
elif BmpHeader.biBitCount == 8:
@ -1970,7 +1968,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
for Index in range(0, len(PaletteBuffer)):
if Index % 4 == 3:
continue
PaletteTemp += bytes([PaletteBuffer[Index]])
PaletteTemp += PaletteBuffer[Index]
PaletteBuffer = PaletteTemp[1:]
return ImageBuffer, PaletteBuffer
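Indexing Python2 str data yields one-character strings that concatenate directly, while indexing Python3 bytes yields ints, hence the bytes([...]) wrapper the revert removes. A sketch of the Python3 spelling of the palette loop above (sample data illustrative):

palette_buffer = b'\x01\x02\x03\xff\x04\x05\x06\xff'
palette_temp = b''
for index in range(len(palette_buffer)):
    if index % 4 == 3:
        continue  # skip the reserved byte of each 4-byte palette entry
    palette_temp += bytes([palette_buffer[index]])  # int -> single byte
print(palette_temp)  # b'\x01\x02\x03\x04\x05\x06'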
@ -2068,7 +2066,7 @@ def CreateCode(Info, AutoGenC, AutoGenH, StringH, UniGenCFlag, UniGenBinBuffer,
if Guid in Info.Module.GetGuidsUsedByPcd():
continue
GuidMacros.append('#define %s %s' % (Guid, Info.Module.Guids[Guid]))
for Guid, Value in list(Info.Module.Protocols.items()) + list(Info.Module.Ppis.items()):
for Guid, Value in Info.Module.Protocols.items() + Info.Module.Ppis.items():
GuidMacros.append('#define %s %s' % (Guid, Value))
# supports FixedAtBuild and FeaturePcd usage in VFR file
if Info.VfrFileList and Info.ModulePcdList:


@ -13,6 +13,7 @@
## Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
import sys
import string
@ -491,7 +492,7 @@ cleanlib:
# EdkII modules always use "_ModuleEntryPoint" as entry point
ImageEntryPoint = "_ModuleEntryPoint"
for k, v in MyAgo.Module.Defines.items():
for k, v in MyAgo.Module.Defines.iteritems():
if k not in MyAgo.Macros:
MyAgo.Macros[k] = v
@ -503,7 +504,7 @@ cleanlib:
MyAgo.Macros['IMAGE_ENTRY_POINT'] = ImageEntryPoint
PCI_COMPRESS_Flag = False
for k, v in MyAgo.Module.Defines.items():
for k, v in MyAgo.Module.Defines.iteritems():
if 'PCI_COMPRESS' == k and 'TRUE' == v:
PCI_COMPRESS_Flag = True
@ -654,7 +655,7 @@ cleanlib:
"module_relative_directory" : MyAgo.SourceDir,
"module_dir" : mws.join (self.Macros["WORKSPACE"], MyAgo.SourceDir),
"package_relative_directory": package_rel_dir,
"module_extra_defines" : ["%s = %s" % (k, v) for k, v in MyAgo.Module.Defines.items()],
"module_extra_defines" : ["%s = %s" % (k, v) for k, v in MyAgo.Module.Defines.iteritems()],
"architecture" : MyAgo.Arch,
"toolchain_tag" : MyAgo.ToolChain,
@ -668,8 +669,8 @@ cleanlib:
"separator" : Separator,
"module_tool_definitions" : ToolsDef,
"shell_command_code" : list(self._SHELL_CMD_[self._FileType].keys()),
"shell_command" : list(self._SHELL_CMD_[self._FileType].values()),
"shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
"shell_command" : self._SHELL_CMD_[self._FileType].values(),
"module_entry_point" : ModuleEntryPoint,
"image_entry_point" : ImageEntryPoint,
@ -917,7 +918,7 @@ cleanlib:
#
# Extract common files list in the dependency files
#
for File in sorted(DepSet, key=lambda x: str(x)):
for File in DepSet:
self.CommonFileDependency.append(self.PlaceMacro(File.Path, self.Macros))
for File in FileDependencyDict:
@ -926,11 +927,11 @@ cleanlib:
continue
NewDepSet = set(FileDependencyDict[File])
NewDepSet -= DepSet
FileDependencyDict[File] = ["$(COMMON_DEPS)"] + sorted(NewDepSet, key=lambda x: str(x))
FileDependencyDict[File] = ["$(COMMON_DEPS)"] + list(NewDepSet)
# Convert target description object to target string in makefile
for Type in self._AutoGenObject.Targets:
for T in sorted(self._AutoGenObject.Targets[Type], key=lambda x: str(x)):
for T in self._AutoGenObject.Targets[Type]:
# Generate related macros if needed
if T.GenFileListMacro and T.FileListMacro not in self.FileListMacros:
self.FileListMacros[T.FileListMacro] = []
@ -1031,7 +1032,7 @@ cleanlib:
CurrentFileDependencyList = DepDb[F]
else:
try:
Fd = open(F.Path, 'rb')
Fd = open(F.Path, 'r')
except BaseException as X:
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))
@ -1041,14 +1042,8 @@ cleanlib:
continue
if FileContent[0] == 0xff or FileContent[0] == 0xfe:
FileContent = str(FileContent, encoding="utf-16")
IncludedFileList = gIncludePattern.findall(FileContent)
else:
try:
FileContent = str(FileContent, encoding="utf-8")
IncludedFileList = gIncludePattern.findall(FileContent)
except:
continue
FileContent = unicode(FileContent, "utf-16")
IncludedFileList = gIncludePattern.findall(FileContent)
for Inc in IncludedFileList:
Inc = Inc.strip()
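The migrated dependency scanner read each source as bytes, used a UTF-16 byte-order mark to pick the decoder, fell back to UTF-8, and skipped files that decode as neither; the restored Python2 code simply calls unicode(FileContent, "utf-16") when a BOM byte is present. A minimal Python3 sketch of the deleted logic:

def decode_source(raw):
    # raw comes from a file opened with 'rb'
    if raw[:1] in (b'\xff', b'\xfe'):  # UTF-16 byte-order-mark byte
        return str(raw, encoding='utf-16')
    try:
        return str(raw, encoding='utf-8')
    except UnicodeDecodeError:
        return None                    # caller skips this file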
@ -1097,7 +1092,7 @@ cleanlib:
DependencySet.update(ForceList)
if File in DependencySet:
DependencySet.remove(File)
DependencyList = sorted(DependencySet, key=lambda x: str(x)) # remove duplicate ones
DependencyList = list(DependencySet) # remove duplicate ones
return DependencyList
@ -1274,8 +1269,8 @@ ${BEGIN}\t-@${create_directory_command}\n${END}\
"separator" : Separator,
"module_tool_definitions" : ToolsDef,
"shell_command_code" : list(self._SHELL_CMD_[self._FileType].keys()),
"shell_command" : list(self._SHELL_CMD_[self._FileType].values()),
"shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
"shell_command" : self._SHELL_CMD_[self._FileType].values(),
"create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
"custom_makefile_content" : CustomMakefile
@ -1448,8 +1443,8 @@ cleanlib:
"toolchain_tag" : MyAgo.ToolChain,
"build_target" : MyAgo.BuildTarget,
"shell_command_code" : list(self._SHELL_CMD_[self._FileType].keys()),
"shell_command" : list(self._SHELL_CMD_[self._FileType].values()),
"shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
"shell_command" : self._SHELL_CMD_[self._FileType].values(),
"build_architecture_list" : MyAgo.Arch,
"architecture" : MyAgo.Arch,
"separator" : Separator,
@ -1584,8 +1579,8 @@ class TopLevelMakefile(BuildFile):
"toolchain_tag" : MyAgo.ToolChain,
"build_target" : MyAgo.BuildTarget,
"shell_command_code" : list(self._SHELL_CMD_[self._FileType].keys()),
"shell_command" : list(self._SHELL_CMD_[self._FileType].values()),
"shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
"shell_command" : self._SHELL_CMD_[self._FileType].values(),
'arch' : list(MyAgo.ArchList),
"build_architecture_list" : ','.join(MyAgo.ArchList),
"separator" : Separator,


@ -10,6 +10,7 @@
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
from __future__ import absolute_import
from io import BytesIO
from Common.Misc import *
from Common.StringUtils import StringToArray
@ -257,7 +258,7 @@ class DbItemList:
# Variable length, need to calculate one by one
#
assert(Index < len(self.RawDataList))
for ItemIndex in range(Index):
for ItemIndex in xrange(Index):
Offset += len(self.RawDataList[ItemIndex])
else:
Offset = self.ItemSize * Index
@ -291,7 +292,7 @@ class DbItemList:
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
Buffer = bytearray()
Buffer = ''
for Datas in self.RawDataList:
if type(Datas) in (list, tuple):
for Data in Datas:
@ -316,7 +317,7 @@ class DbExMapTblItemList (DbItemList):
DbItemList.__init__(self, ItemSize, DataList, RawDataList)
def PackData(self):
Buffer = bytearray()
Buffer = ''
PackStr = "=LHH"
for Datas in self.RawDataList:
Buffer += pack(PackStr,
@ -344,7 +345,7 @@ class DbComItemList (DbItemList):
assert(False)
else:
assert(Index < len(self.RawDataList))
for ItemIndex in range(Index):
for ItemIndex in xrange(Index):
Offset += len(self.RawDataList[ItemIndex]) * self.ItemSize
return Offset
@ -365,7 +366,7 @@ class DbComItemList (DbItemList):
def PackData(self):
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
Buffer = bytearray()
Buffer = ''
for DataList in self.RawDataList:
for Data in DataList:
if type(Data) in (list, tuple):
@ -386,7 +387,7 @@ class DbVariableTableItemList (DbComItemList):
def PackData(self):
PackStr = "=LLHHLHH"
Buffer = bytearray()
Buffer = ''
for DataList in self.RawDataList:
for Data in DataList:
Buffer += pack(PackStr,
@ -410,7 +411,7 @@ class DbStringHeadTableItemList(DbItemList):
# Variable length, need to calculate one by one
#
assert(Index < len(self.RawDataList))
for ItemIndex in range(Index):
for ItemIndex in xrange(Index):
Offset += len(self.RawDataList[ItemIndex])
else:
for innerIndex in range(Index):
@ -447,7 +448,7 @@ class DbSkuHeadTableItemList (DbItemList):
def PackData(self):
PackStr = "=LL"
Buffer = bytearray()
Buffer = ''
for Data in self.RawDataList:
Buffer += pack(PackStr,
GetIntegerValue(Data[0]),
@ -469,7 +470,7 @@ class DbSizeTableItemList (DbItemList):
return length * self.ItemSize
def PackData(self):
PackStr = "=H"
Buffer = bytearray()
Buffer = ''
for Data in self.RawDataList:
Buffer += pack(PackStr,
GetIntegerValue(Data[0]))
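Each of these PackData() hunks replaces a bytearray accumulator with the empty str that Python2 concatenates with struct.pack output; on Python3, pack() returns bytes, and a bytearray accepts += bytes on both interpreters. For illustration:

from struct import pack

buf = bytearray()                  # the migration's accumulator; '' on Python2
for max_len, cur_len in ((4, 2), (8, 8)):
    buf += pack('=H', max_len)     # pack() yields bytes
    buf += pack('=H', cur_len)
print(len(buf))                    # 8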
@ -494,14 +495,14 @@ class DbStringItemList (DbComItemList):
assert(len(RawDataList) == len(LenList))
DataList = []
# adjust DataList according to the LenList
for Index in range(len(RawDataList)):
for Index in xrange(len(RawDataList)):
Len = LenList[Index]
RawDatas = RawDataList[Index]
assert(Len >= len(RawDatas))
ActualDatas = []
for i in range(len(RawDatas)):
for i in xrange(len(RawDatas)):
ActualDatas.append(RawDatas[i])
for i in range(len(RawDatas), Len):
for i in xrange(len(RawDatas), Len):
ActualDatas.append(0)
DataList.append(ActualDatas)
self.LenList = LenList
@ -510,7 +511,7 @@ class DbStringItemList (DbComItemList):
Offset = 0
assert(Index < len(self.LenList))
for ItemIndex in range(Index):
for ItemIndex in xrange(Index):
Offset += self.LenList[ItemIndex]
return Offset
@ -611,7 +612,7 @@ def BuildExDataBase(Dict):
DbVardefValueUint32 = DbItemList(4, RawDataList = VardefValueUint32)
VpdHeadValue = Dict['VPD_DB_VALUE']
DbVpdHeadValue = DbComItemList(4, RawDataList = VpdHeadValue)
ExMapTable = list(zip(Dict['EXMAPPING_TABLE_EXTOKEN'], Dict['EXMAPPING_TABLE_LOCAL_TOKEN'], Dict['EXMAPPING_TABLE_GUID_INDEX']))
ExMapTable = zip(Dict['EXMAPPING_TABLE_EXTOKEN'], Dict['EXMAPPING_TABLE_LOCAL_TOKEN'], Dict['EXMAPPING_TABLE_GUID_INDEX'])
DbExMapTable = DbExMapTblItemList(8, RawDataList = ExMapTable)
LocalTokenNumberTable = Dict['LOCAL_TOKEN_NUMBER_DB_VALUE']
DbLocalTokenNumberTable = DbItemList(4, RawDataList = LocalTokenNumberTable)
@ -645,7 +646,7 @@ def BuildExDataBase(Dict):
PcdNameOffsetTable = Dict['PCD_NAME_OFFSET']
DbPcdNameOffsetTable = DbItemList(4, RawDataList = PcdNameOffsetTable)
SizeTableValue = list(zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH']))
SizeTableValue = zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH'])
DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
InitValueUint16 = Dict['INIT_DB_VALUE_UINT16']
DbInitValueUint16 = DbComItemList(2, RawDataList = InitValueUint16)
@ -698,7 +699,7 @@ def BuildExDataBase(Dict):
# Get offset of SkuId table in the database
SkuIdTableOffset = FixedHeaderLen
for DbIndex in range(len(DbTotal)):
for DbIndex in xrange(len(DbTotal)):
if DbTotal[DbIndex] is SkuidValue:
break
SkuIdTableOffset += DbItemTotal[DbIndex].GetListSize()
@ -710,7 +711,7 @@ def BuildExDataBase(Dict):
for (LocalTokenNumberTableIndex, (Offset, Table)) in enumerate(LocalTokenNumberTable):
DbIndex = 0
DbOffset = FixedHeaderLen
for DbIndex in range(len(DbTotal)):
for DbIndex in xrange(len(DbTotal)):
if DbTotal[DbIndex] is Table:
DbOffset += DbItemTotal[DbIndex].GetInterOffset(Offset)
break
@ -736,7 +737,7 @@ def BuildExDataBase(Dict):
(VariableHeadGuidIndex, VariableHeadStringIndex, SKUVariableOffset, VariableOffset, VariableRefTable, VariableAttribute) = VariableEntryPerSku[:]
DbIndex = 0
DbOffset = FixedHeaderLen
for DbIndex in range(len(DbTotal)):
for DbIndex in xrange(len(DbTotal)):
if DbTotal[DbIndex] is VariableRefTable:
DbOffset += DbItemTotal[DbIndex].GetInterOffset(VariableOffset)
break
@ -756,7 +757,7 @@ def BuildExDataBase(Dict):
# calculate various table offset now
DbTotalLength = FixedHeaderLen
for DbIndex in range(len(DbItemTotal)):
for DbIndex in xrange(len(DbItemTotal)):
if DbItemTotal[DbIndex] is DbLocalTokenNumberTable:
LocalTokenNumberTableOffset = DbTotalLength
elif DbItemTotal[DbIndex] is DbExMapTable:
@ -849,7 +850,7 @@ def BuildExDataBase(Dict):
Index = 0
for Item in DbItemTotal:
Index +=1
b = bytes(Item.PackData())
b = Item.PackData()
Buffer += b
if Index == InitTableNum:
if len(Buffer) % 8:
@ -917,9 +918,9 @@ def CreatePcdDataBase(PcdDBData):
totallenbuff = pack("=L", totallen)
newbuffer = databasebuff[:32]
for i in range(4):
newbuffer += bytes([totallenbuff[i]])
newbuffer += totallenbuff[i]
for i in range(36, totallen):
newbuffer += bytes([databasebuff[i]])
newbuffer += databasebuff[i]
return newbuffer
@ -962,7 +963,7 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)
final_data = ()
for item in PcdDbBuffer:
final_data += unpack("B", bytes([item]))
final_data += unpack("B", item)
PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)
PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)
VarCheckTableData[(skuname, skuid)] = VarCheckTab
@ -975,7 +976,7 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)
final_data = ()
for item in PcdDbBuffer:
final_data += unpack("B", bytes([item]))
final_data += unpack("B", item)
PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)
return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)
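Iterating a Python3 bytes buffer yields ints, so each byte had to be rewrapped as bytes([item]) before unpack(); on Python2, iteration yields one-byte strings that unpack() accepts as-is. A sketch of the Python3 spelling (on Python3 alone, tuple(pcd_db_buffer) would give the same tuple directly):

from struct import unpack

pcd_db_buffer = b'\x10\x20\x30'
final_data = ()
for item in pcd_db_buffer:                    # item is an int on Python3
    final_data += unpack('B', bytes([item]))
print(final_data)                             # (16, 32, 48)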
@ -1348,7 +1349,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
DbValueList.append(Sku.DefaultValue)
Pcd.TokenTypeList = sorted(set(Pcd.TokenTypeList))
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
if Pcd.DatumType == TAB_VOID:
Dict['SIZE_TABLE_CNAME'].append(CName)
Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
@ -1449,7 +1450,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CNameBinArray.split(","))
Pcd.TokenTypeList = sorted(set(Pcd.TokenTypeList))
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
# search the Offset and Table, used by LocalTokenNumberTableOffset
if 'PCD_TYPE_HII' in Pcd.TokenTypeList:


@ -66,7 +66,7 @@ class VariableMgr(object):
data = value_list[0]
value_list = []
for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):
value_list.append(hex(unpack("B", bytes([data_byte]))[0]))
value_list.append(hex(unpack("B", data_byte)[0]))
newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = value_list
try:
newvaluestr = "{" + ",".join(VariableMgr.assemble_variable(newvalue)) +"}"
@ -87,13 +87,13 @@ class VariableMgr(object):
data = value_list[0]
value_list = []
for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):
value_list.append(hex(unpack("B", bytes([data_byte]))[0]))
value_list.append(hex(unpack("B", data_byte)[0]))
newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = (value_list,item.pcdname,item.PcdDscLine)
for offset in newvalue:
value_list,itemPcdname,itemPcdDscLine = newvalue[offset]
if offset > len(BaseValue) or (offset + len(value_list) > len(BaseValue)):
EdkLogger.error("build", AUTOGEN_ERROR, "The EFI Variable referred by PCD %s in line %s exceeds variable size: %s\n" % (itemPcdname,itemPcdDscLine,hex(len(BaseValue))))
for i in range(len(value_list)):
for i in xrange(len(value_list)):
BaseValue[offset + i] = value_list[i]
newvaluestr = "{" + ",".join(BaseValue) +"}"
return newvaluestr
@ -129,7 +129,7 @@ class VariableMgr(object):
for current_valuedict_key in ordered_valuedict_keys:
if current_valuedict_key < len(var_value):
raise
for _ in range(current_valuedict_key - len(var_value)):
for _ in xrange(current_valuedict_key - len(var_value)):
var_value.append('0x00')
var_value += valuedict[current_valuedict_key]
return var_value
@ -161,7 +161,7 @@ class VariableMgr(object):
default_data_array = ()
for item in default_data_buffer:
default_data_array += unpack("B", bytes([item]))
default_data_array += unpack("B", item)
var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])
@ -179,7 +179,7 @@ class VariableMgr(object):
others_data_array = ()
for item in others_data_buffer:
others_data_array += unpack("B", bytes([item]))
others_data_array += unpack("B", item)
data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)
@ -195,7 +195,7 @@ class VariableMgr(object):
return []
pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})
NvStoreDataBuffer = bytearray()
NvStoreDataBuffer = ""
var_data_offset = collections.OrderedDict()
offset = NvStorageHeaderSize
for default_data, default_info in pcds_default_data.values():
@ -222,7 +222,7 @@ class VariableMgr(object):
nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8)
data_delta_structure_buffer = bytearray()
data_delta_structure_buffer = ""
for skuname, defaultstore in var_data:
if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
continue
@ -254,7 +254,7 @@ class VariableMgr(object):
def unpack_data(data):
final_data = ()
for item in data:
final_data += unpack("B", bytes([item]))
final_data += unpack("B", item)
return final_data
@staticmethod
@ -322,7 +322,7 @@ class VariableMgr(object):
@staticmethod
def PACK_VARIABLES_DATA(var_value,data_type, tail = None):
Buffer = bytearray()
Buffer = ""
data_len = 0
if data_type == DataType.TAB_VOID:
for value_char in var_value.strip("{").strip("}").split(","):
@ -352,7 +352,7 @@ class VariableMgr(object):
@staticmethod
def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):
Buffer = bytearray()
Buffer = ""
Buffer += pack("=L", 4+8+8)
Buffer += pack("=Q", int(skuid))
Buffer += pack("=Q", int(defaultstoragename))
@ -377,7 +377,7 @@ class VariableMgr(object):
def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):
skuid = self.GetSkuId(skuname)
defaultstorageid = self.GetDefaultStoreId(defaultstoragename)
Buffer = bytearray()
Buffer = ""
Buffer += pack("=L", 4+8+8)
Buffer += pack("=Q", int(skuid))
Buffer += pack("=Q", int(defaultstorageid))
@ -400,7 +400,7 @@ class VariableMgr(object):
@staticmethod
def PACK_VARIABLE_NAME(var_name):
Buffer = bytearray()
Buffer = ""
for name_char in var_name.strip("{").strip("}").split(","):
Buffer += pack("=B", int(name_char, 16))


@ -13,6 +13,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import *
from Common.StringUtils import GetLineNo


@ -34,7 +34,7 @@ class InfSectionParser():
SectionData = []
try:
FileLinesList = open(self._FilePath, "r").readlines()
FileLinesList = open(self._FilePath, "r", 0).readlines()
except BaseException:
EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % self._FilePath)


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import re
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import *
@ -122,8 +123,6 @@ def DecToHexList(Dec, Digit = 8):
# @retval: A list for formatted hex string
#
def AscToHexList(Ascii):
if isinstance(Ascii, bytes):
return ['0x{0:02X}'.format(Item) for Item in Ascii]
return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]
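The deleted guard handled Python3, where iterating a bytes value yields ints that ord() would reject; the surviving line assumes one-character strings. A sketch of the deleted Python3-aware shape, as run on Python3:

def asc_to_hex_list(ascii_data):
    # bytes input: items are already ints on Python3.
    if isinstance(ascii_data, bytes):
        return ['0x{0:02X}'.format(item) for item in ascii_data]
    # str input: items are one-character strings, so ord() is needed.
    return ['0x{0:02X}'.format(ord(item)) for item in ascii_data]

print(asc_to_hex_list(b'AB'))  # ['0x41', '0x42'] on Python3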
## Create content of .h file
@ -552,9 +551,9 @@ def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, Ski
#
# support ISO 639-2 codes in .UNI files of EDK Shell
#
Uni = UniFileClassObject(sorted (UniFilList, key=lambda x: x.File), True, IncludePathList)
Uni = UniFileClassObject(sorted (UniFilList), True, IncludePathList)
else:
Uni = UniFileClassObject(sorted (UniFilList, key=lambda x: x.File), IsCompatibleMode, IncludePathList)
Uni = UniFileClassObject(sorted (UniFilList), IsCompatibleMode, IncludePathList)
else:
EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, 'No unicode files given')


@ -16,6 +16,7 @@
##
# Import Modules
#
from __future__ import print_function
import Common.LongFilePathOs as os, codecs, re
import distutils.util
import Common.EdkLogger as EdkLogger
@ -45,6 +46,18 @@ BACK_SLASH_PLACEHOLDER = u'\u0006'
gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE)
## Convert a python unicode string to a normal string
#
# Convert a python unicode string to a normal string
# UniToStr(u'I am a string') is 'I am a string'
#
# @param Uni: The python unicode string
#
# @retval: The formatted normal string
#
def UniToStr(Uni):
return repr(Uni)[2:-1]
## Convert a unicode string to a Hex list
#
# Convert a unicode string to a Hex list
@ -426,7 +439,7 @@ class UniFileClassObject(object):
if EndPos != -1 and EndPos - StartPos == 6 :
if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE):
EndStr = Line[EndPos: ]
UniStr = Line[StartPos + 2: EndPos]
UniStr = ('\u' + (Line[StartPos + 2 : EndPos])).decode('unicode_escape')
if EndStr.startswith(u'\\x') and len(EndStr) >= 7:
if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE):
Line = Line[0 : StartPos] + UniStr + EndStr


@ -41,7 +41,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
os.mkdir(dest)
BinFileName = "PcdVarCheck.bin"
BinFilePath = os.path.join(dest, BinFileName)
Buffer = bytearray()
Buffer = ''
index = 0
for var_check_tab in self.var_check_info:
index += 1
@ -57,7 +57,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
itemIndex += 1
realLength += 5
for v_data in item.data:
if isinstance(v_data, int):
if type(v_data) in (int, long):
realLength += item.StorageWidth
else:
realLength += item.StorageWidth
@ -137,7 +137,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
Buffer += b
realLength += 1
for v_data in item.data:
if isinstance(v_data, int):
if type(v_data) in (int, long):
b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data)
Buffer += b
realLength += item.StorageWidth
@ -241,7 +241,7 @@ class VAR_CHECK_PCD_VALID_OBJ(object):
class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
def __init__(self, VarOffset, validlist, PcdDataType):
super().__init__(VarOffset, validlist, PcdDataType)
super(VAR_CHECK_PCD_VALID_LIST, self).__init__(VarOffset, validlist, PcdDataType)
self.Type = 1
valid_num_list = []
for item in self.rawdata:
@ -261,7 +261,7 @@ class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
class VAR_CHECK_PCD_VALID_RANGE(VAR_CHECK_PCD_VALID_OBJ):
def __init__(self, VarOffset, validrange, PcdDataType):
super().__init__(VarOffset, validrange, PcdDataType)
super(VAR_CHECK_PCD_VALID_RANGE, self).__init__(VarOffset, validrange, PcdDataType)
self.Type = 2
RangeExpr = ""
i = 0


@ -20,6 +20,8 @@
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import Common.LongFilePathOs as os
import sys
import encodings.ascii


@ -13,6 +13,7 @@
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
from io import BytesIO
from . import StringTable as st
@ -185,7 +186,7 @@ class PcdEntry:
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
try:
self.PcdValue = pack('%ds' % Size, bytes(ValueString, 'utf-8'))
self.PcdValue = pack('%ds' % Size, ValueString)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
@ -211,7 +212,7 @@ class PcdEntry:
ReturnArray = array.array('B')
for Index in range(len(ValueList)):
for Index in xrange(len(ValueList)):
Value = None
if ValueList[Index].lower().startswith('0x'):
# translate hex value
@ -237,7 +238,7 @@ class PcdEntry:
ReturnArray.append(Value)
for Index in range(len(ValueList), Size):
for Index in xrange(len(ValueList), Size):
ReturnArray.append(0)
self.PcdValue = ReturnArray.tolist()
@ -272,7 +273,7 @@ class PcdEntry:
"Invalid unicode character %s in unicode string %s(File: %s Line: %s)" % \
(Value, UnicodeString, self.FileName, self.Lineno))
for Index in range(len(UnicodeString) * 2, Size):
for Index in xrange(len(UnicodeString) * 2, Size):
ReturnArray.append(0)
self.PcdValue = ReturnArray.tolist()
@ -305,7 +306,7 @@ class GenVPD :
self.PcdFixedOffsetSizeList = []
self.PcdUnknownOffsetList = []
try:
fInputfile = open(InputFileName, "r")
fInputfile = open(InputFileName, "r", 0)
try:
self.FileLinesList = fInputfile.readlines()
except:
@ -430,7 +431,7 @@ class GenVPD :
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, 'The offset value of PCD %s should be %s-byte aligned.' % (PCD.PcdCName, Alignment))
else:
if PCD.PcdOccupySize % Alignment != 0:
PCD.PcdOccupySize = (PCD.PcdOccupySize // Alignment + 1) * Alignment
PCD.PcdOccupySize = (PCD.PcdOccupySize / Alignment + 1) * Alignment
PackSize = PCD.PcdOccupySize
if PCD._IsBoolean(PCD.PcdValue, PCD.PcdSize):
@ -508,7 +509,7 @@ class GenVPD :
NowOffset = 0
for Pcd in self.PcdUnknownOffsetList :
if NowOffset % Pcd.Alignment != 0:
NowOffset = (NowOffset // Pcd.Alignment + 1) * Pcd.Alignment
NowOffset = (NowOffset/ Pcd.Alignment + 1) * Pcd.Alignment
Pcd.PcdBinOffset = NowOffset
Pcd.PcdOffset = str(hex(Pcd.PcdBinOffset))
NowOffset += Pcd.PcdOccupySize
@ -572,7 +573,7 @@ class GenVPD :
# Not been fixed
if eachUnfixedPcd.PcdOffset == '*' :
if LastOffset % eachUnfixedPcd.Alignment != 0:
LastOffset = (LastOffset // eachUnfixedPcd.Alignment + 1) * eachUnfixedPcd.Alignment
LastOffset = (LastOffset / eachUnfixedPcd.Alignment + 1) * eachUnfixedPcd.Alignment
# The offset un-fixed pcd can write into this free space
if needFixPcdSize <= (NowOffset - LastOffset) :
# Change the offset value of un-fixed pcd
@ -626,7 +627,7 @@ class GenVPD :
NeedFixPcd.PcdBinOffset = LastPcd.PcdBinOffset + LastPcd.PcdOccupySize
if NeedFixPcd.PcdBinOffset % NeedFixPcd.Alignment != 0:
NeedFixPcd.PcdBinOffset = (NeedFixPcd.PcdBinOffset // NeedFixPcd.Alignment + 1) * NeedFixPcd.Alignment
NeedFixPcd.PcdBinOffset = (NeedFixPcd.PcdBinOffset / NeedFixPcd.Alignment + 1) * NeedFixPcd.Alignment
NeedFixPcd.PcdOffset = str(hex(NeedFixPcd.PcdBinOffset))
@ -650,13 +651,13 @@ class GenVPD :
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)
try :
fMapFile = open(MapFileName, "w")
fMapFile = open(MapFileName, "w", 0)
except:
# Open failed
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)
# Use a instance of BytesIO to cache data
fStringIO = BytesIO()
fStringIO = BytesIO('')
# Write the header of map file.
try :
@ -674,7 +675,8 @@ class GenVPD :
# Write Vpd binary file
fStringIO.seek (eachPcd.PcdBinOffset)
if isinstance(eachPcd.PcdValue, list):
fStringIO.write(bytes(eachPcd.PcdValue))
ValueList = [chr(Item) for Item in eachPcd.PcdValue]
fStringIO.write(''.join(ValueList))
else:
fStringIO.write (eachPcd.PcdValue)


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import sqlite3
import Common.LongFilePathOs as os


@ -12,6 +12,7 @@
#
## Import modules
from __future__ import absolute_import
import Common.LongFilePathOs as os, sys, logging
import traceback
from .BuildToolError import *


@ -12,6 +12,8 @@
## Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
from Common.GlobalData import *
from CommonDataClass.Exceptions import BadExpression
from CommonDataClass.Exceptions import WrnExpression
@ -204,7 +206,7 @@ SupportedInMacroList = ['TARGET', 'TOOL_CHAIN_TAG', 'ARCH', 'FAMILY']
class BaseExpression(object):
def __init__(self, *args, **kwargs):
super().__init__()
super(BaseExpression, self).__init__()
# Check if current token matches the operators given from parameter
def _IsOperator(self, OpSet):
@ -324,7 +326,7 @@ class ValueExpression(BaseExpression):
return Val
def __init__(self, Expression, SymbolTable={}):
super().__init__(self, Expression, SymbolTable)
super(ValueExpression, self).__init__(self, Expression, SymbolTable)
self._NoProcess = False
if not isinstance(Expression, type('')):
self._Expr = Expression
@ -425,13 +427,6 @@ class ValueExpression(BaseExpression):
else:
Val = Val3
continue
#
# PEP 238 -- Changing the Division Operator
# x/y to return a reasonable approximation of the mathematical result of the division ("true division")
# x//y to return the floor ("floor division")
#
if Op == '/':
Op = '//'
try:
Val = self.Eval(Op, Val, EvalFunc())
except WrnExpression as Warn:
@ -905,7 +900,7 @@ class ValueExpressionEx(ValueExpression):
if TmpValue.bit_length() == 0:
PcdValue = '{0x00}'
else:
for I in range((TmpValue.bit_length() + 7) // 8):
for I in range((TmpValue.bit_length() + 7) / 8):
TmpList.append('0x%02x' % ((TmpValue >> I * 8) & 0xff))
PcdValue = '{' + ', '.join(TmpList) + '}'
except:
@ -1033,7 +1028,7 @@ class ValueExpressionEx(ValueExpression):
if __name__ == '__main__':
pass
while True:
input = input('Input expr: ')
input = raw_input('Input expr: ')
if input in 'qQ':
break
try:


@ -11,9 +11,11 @@
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
from __future__ import absolute_import
import os
from . import LongFilePathOsPath
from Common.LongFilePathSupport import LongFilePath
from Common.LongFilePathSupport import UniToStr
import time
path = LongFilePathOsPath
@ -62,7 +64,7 @@ def listdir(path):
List = []
uList = os.listdir(u"%s" % LongFilePath(path))
for Item in uList:
List.append(Item)
List.append(UniToStr(Item))
return List
environ = os.environ


@ -1,7 +1,7 @@
## @file
# Override built in function file.open to provide support for long file path
#
# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2014 - 2015, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@ -49,3 +49,15 @@ def CopyLongFilePath(src, dst):
with open(LongFilePath(src), 'rb') as fsrc:
with open(LongFilePath(dst), 'wb') as fdst:
shutil.copyfileobj(fsrc, fdst)
## Convert a python unicode string to a normal string
#
# Convert a python unicode string to a normal string
# UniToStr(u'I am a string') is 'I am a string'
#
# @param Uni: The python unicode string
#
# @retval: The formatted normal string
#
def UniToStr(Uni):
return repr(Uni)[2:-1]


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
import sys
import string
@ -24,8 +25,8 @@ import pickle
import array
import shutil
from struct import pack
from collections import UserDict as IterableUserDict
from collections import OrderedDict
from UserDict import IterableUserDict
from UserList import UserList
from Common import EdkLogger as EdkLogger
from Common import GlobalData as GlobalData
@ -454,16 +455,13 @@ def RemoveDirectory(Directory, Recursively=False):
# @retval False If the file content is the same
#
def SaveFileOnChange(File, Content, IsBinaryFile=True):
if not IsBinaryFile:
Content = Content.replace("\n", os.linesep)
if os.path.exists(File):
try:
if isinstance(Content, bytes):
with open(File, "rb") as f:
if Content == f.read():
return False
else:
with open(File, "r") as f:
if Content == f.read():
return False
if Content == open(File, "rb").read():
return False
except:
EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File)
@ -477,12 +475,19 @@ def SaveFileOnChange(File, Content, IsBinaryFile=True):
EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)
try:
if isinstance(Content, bytes):
with open(File, "wb") as Fd:
if GlobalData.gIsWindows:
try:
from .PyUtility import SaveFileToDisk
if not SaveFileToDisk(File, Content):
EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData=File)
except:
Fd = open(File, "wb")
Fd.write(Content)
Fd.close()
else:
with open(File, "w") as Fd:
Fd.write(Content)
Fd = open(File, "wb")
Fd.write(Content)
Fd.close()
except IOError as X:
EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
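The migrated SaveFileOnChange chose its file mode from the payload type, sending bytes through 'wb' and text through 'w', instead of the Windows-specific PyUtility path restored here. A condensed sketch of that Python3 shape (helper name illustrative, error handling elided):

import os

def save_file_on_change(path, content):
    binary = isinstance(content, bytes)
    if os.path.exists(path):
        with open(path, 'rb' if binary else 'r') as f:
            if f.read() == content:
                return False      # identical content: skip the write
    with open(path, 'wb' if binary else 'w') as f:
        f.write(content)
    return True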
@ -641,7 +646,7 @@ def RealPath2(File, Dir='', OverrideDir=''):
#
def GuidValue(CName, PackageList, Inffile = None):
for P in PackageList:
GuidKeys = list(P.Guids.keys())
GuidKeys = P.Guids.keys()
if Inffile and P._PrivateGuids:
if not Inffile.startswith(P.MetaFile.Dir):
GuidKeys = [x for x in P.Guids if x not in P._PrivateGuids]
@ -660,7 +665,7 @@ def GuidValue(CName, PackageList, Inffile = None):
#
def ProtocolValue(CName, PackageList, Inffile = None):
for P in PackageList:
ProtocolKeys = list(P.Protocols.keys())
ProtocolKeys = P.Protocols.keys()
if Inffile and P._PrivateProtocols:
if not Inffile.startswith(P.MetaFile.Dir):
ProtocolKeys = [x for x in P.Protocols if x not in P._PrivateProtocols]
@ -679,7 +684,7 @@ def ProtocolValue(CName, PackageList, Inffile = None):
#
def PpiValue(CName, PackageList, Inffile = None):
for P in PackageList:
PpiKeys = list(P.Ppis.keys())
PpiKeys = P.Ppis.keys()
if Inffile and P._PrivatePpis:
if not Inffile.startswith(P.MetaFile.Dir):
PpiKeys = [x for x in P.Ppis if x not in P._PrivatePpis]
@ -975,7 +980,7 @@ class sdict(IterableUserDict):
## append support
def append(self, sdict):
for key in sdict.keys():
for key in sdict:
if key not in self._key_list:
self._key_list.append(key)
IterableUserDict.__setitem__(self, key, sdict[key])
@ -1015,11 +1020,11 @@ class sdict(IterableUserDict):
## Keys interation support
def iterkeys(self):
return self.keys()
return iter(self.keys())
## Values interation support
def itervalues(self):
return self.values()
return iter(self.values())
## Return value related to a key, and remove the (key, value) from the dict
def pop(self, key, *dv):
@ -1028,7 +1033,7 @@ class sdict(IterableUserDict):
value = self[key]
self.__delitem__(key)
elif len(dv) != 0 :
value = dv[0]
value = kv[0]
return value
## Return (key, value) pair, and remove the (key, value) from the dict
@ -1292,12 +1297,12 @@ def ParseDevPathValue (Value):
if err:
raise BadExpression("DevicePath: %s" % str(err))
Size = len(out.split())
out = ','.join(out.decode(encoding='utf-8', errors='ignore').split())
out = ','.join(out.split())
return '{' + out + '}', Size
def ParseFieldValue (Value):
if isinstance(Value, type(0)):
return Value, (Value.bit_length() + 7) // 8
return Value, (Value.bit_length() + 7) / 8
if not isinstance(Value, type('')):
raise BadExpression('Type %s is %s' %(Value, type(Value)))
Value = Value.strip()
@ -1331,7 +1336,7 @@ def ParseFieldValue (Value):
if Value[0] == '"' and Value[-1] == '"':
Value = Value[1:-1]
try:
Value = "{" + ','.join([str(i) for i in uuid.UUID(Value).bytes_le]) + "}"
Value = "'" + uuid.UUID(Value).get_bytes_le() + "'"
except ValueError as Message:
raise BadExpression(Message)
Value, Size = ParseFieldValue(Value)
@ -1418,12 +1423,12 @@ def ParseFieldValue (Value):
raise BadExpression("invalid hex value: %s" % Value)
if Value == 0:
return 0, 1
return Value, (Value.bit_length() + 7) // 8
return Value, (Value.bit_length() + 7) / 8
if Value[0].isdigit():
Value = int(Value, 10)
if Value == 0:
return 0, 1
return Value, (Value.bit_length() + 7) // 8
return Value, (Value.bit_length() + 7) / 8
if Value.lower() == 'true':
return 1, 1
if Value.lower() == 'false':
@ -1584,19 +1589,15 @@ def CheckPcdDatum(Type, Value):
return False, "Invalid value [%s] of type [%s]; must be one of TRUE, True, true, 0x1, 0x01, 1"\
", FALSE, False, false, 0x0, 0x00, 0" % (Value, Type)
elif Type in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64]:
try:
Val = int(Value, 0)
except:
try:
Val = int(Value.lstrip('0'))
except:
return False, "Invalid value [%s] of type [%s];" \
" must be a hexadecimal, decimal or octal in C language format." % (Value, Type)
if Val > MAX_VAL_TYPE[Type]:
return False, "Too large PCD value[%s] for datum type [%s]" % (Value, Type)
if Val < 0:
if Value and int(Value, 0) < 0:
return False, "PCD can't be set to negative value[%s] for datum type [%s]" % (Value, Type)
try:
Value = long(Value, 0)
if Value > MAX_VAL_TYPE[Type]:
return False, "Too large PCD value[%s] for datum type [%s]" % (Value, Type)
except:
return False, "Invalid value [%s] of type [%s];"\
" must be a hexadecimal, decimal or octal in C language format." % (Value, Type)
else:
return True, "StructurePcd"
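
The restored long(Value, 0) relies on Python 2's separate long type; Python 3 folds arbitrary precision into int, and its int(Value, 0) is also stricter about C-style octals, which is what the removed lstrip('0') fallback papered over. On Python 3:

    assert int('0x10', 0) == 16        # base detected from the prefix
    try:
        int('010', 0)                  # py3 rejects ambiguous leading zeros
    except ValueError:
        pass                           # py2's long('010', 0) read this as octal 8
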
@ -1634,7 +1635,7 @@ def SplitOption(OptionString):
def CommonPath(PathList):
P1 = min(PathList).split(os.path.sep)
P2 = max(PathList).split(os.path.sep)
for Index in range(min(len(P1), len(P2))):
for Index in xrange(min(len(P1), len(P2))):
if P1[Index] != P2[Index]:
return os.path.sep.join(P1[:Index])
return os.path.sep.join(P1)
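
xrange() exists only on Python 2; Python 3's range() is already a lazy sequence, so the spelling is the sole difference in this loop. For example (Python 3):

    P1 = 'usr/local/bin'.split('/')
    P2 = 'usr/local/share'.split('/')
    for Index in range(min(len(P1), len(P2))):   # py2 spelled this xrange(...)
        if P1[Index] != P2[Index]:
            break
    assert P1[:Index] == ['usr', 'local']
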
@ -1859,7 +1860,7 @@ class PeImageClass():
ByteArray = array.array('B')
ByteArray.fromfile(PeObject, 4)
# PE signature should be 'PE\0\0'
if ByteArray.tostring() != b'PE\0\0':
if ByteArray.tostring() != 'PE\0\0':
self.ErrorInfo = self.FileName + ' has no valid PE signature PE00'
return
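
The dropped b prefix only works on Python 2, where array.tostring() returns str and compares equal to 'PE\0\0'. On Python 3 the same call yields bytes, which never compare equal to a str, so the signature check would silently fail. A sketch (Python 3, where tobytes() replaces the tostring() name removed in 3.9):

    import array
    ByteArray = array.array('B', b'PE\0\0')
    assert ByteArray.tobytes() == b'PE\0\0'   # bytes literal required on py3
    assert ByteArray.tobytes() != 'PE\0\0'    # comparing against str is always False
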
@ -1951,7 +1952,7 @@ class SkuClass():
ExtraData = "SKU-ID [%s] value %s exceeds the max value of UINT64"
% (SkuName, SkuId))
self.AvailableSkuIds = OrderedDict()
self.AvailableSkuIds = sdict()
self.SkuIdSet = []
self.SkuIdNumberSet = []
self.SkuData = SkuIds
@ -1961,7 +1962,7 @@ class SkuClass():
self.SkuIdSet = ['DEFAULT']
self.SkuIdNumberSet = ['0U']
elif SkuIdentifier == 'ALL':
self.SkuIdSet = list(SkuIds.keys())
self.SkuIdSet = SkuIds.keys()
self.SkuIdNumberSet = [num[0].strip() + 'U' for num in SkuIds.values()]
else:
r = SkuIdentifier.split('|')
@ -2081,7 +2082,7 @@ def PackRegistryFormatGuid(Guid):
# @retval Value The integer value that the input represents
#
def GetIntegerValue(Input):
if isinstance(Input, int):
if type(Input) in (int, long):
return Input
String = Input
if String.endswith("U"):
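
type(Input) in (int, long) is the Python 2 spelling: there, values above sys.maxint were a distinct long type. Python 3 has a single unbounded int, so isinstance(Input, int) covers every case:

    Input = 10 ** 30                  # far beyond the old sys.maxint
    assert isinstance(Input, int)     # one integer type on Python 3
    # Python 2: type(10**30) is long, so the (int, long) tuple was needed
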


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from .StringUtils import *
from CommonDataClass.DataClass import *
from .DataType import *

Binary file not shown.


@ -12,6 +12,7 @@
# # Import Modules
#
from __future__ import print_function
from Common.GlobalData import *
from CommonDataClass.Exceptions import BadExpression
from CommonDataClass.Exceptions import WrnExpression
@ -347,7 +348,7 @@ class RangeExpression(BaseExpression):
def __init__(self, Expression, PcdDataType, SymbolTable = {}):
super().__init__(self, Expression, PcdDataType, SymbolTable)
super(RangeExpression, self).__init__(self, Expression, PcdDataType, SymbolTable)
self._NoProcess = False
if not isinstance(Expression, type('')):
self._Expr = Expression
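
The zero-argument super() is Python 3 syntax; Python 2 needs the explicit class and instance, which is why the revert restores super(RangeExpression, self). A minimal illustration of the two spellings (Python 3; on Python 2 the base class must also derive from object):

    class Base:
        def __init__(self, Expr):
            self.Expr = Expr

    class Py3Style(Base):
        def __init__(self, Expr):
            super().__init__(Expr)                  # Python 3 only

    class Py2Style(Base):
        def __init__(self, Expr):
            super(Py2Style, self).__init__(Expr)    # works on both interpreters

    assert Py3Style('0x0-0xFF').Expr == Py2Style('0x0-0xFF').Expr
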


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import re
from . import DataType
import Common.LongFilePathOs as os
@ -98,7 +99,7 @@ def GetSplitValueList(String, SplitTag=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
# @retval list() A list for the split string
#
def GetSplitList(String, SplitStr=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
return list(map(lambda l: l.strip(), String.split(SplitStr, MaxSplit)))
return map(lambda l: l.strip(), String.split(SplitStr, MaxSplit))
## MergeArches
#
@ -544,7 +545,7 @@ def GetSingleValueOfKeyFromLines(Lines, Dictionary, CommentCharacter, KeySplitCh
#
LineList[1] = CleanString(LineList[1], CommentCharacter)
if ValueSplitFlag:
Value = list(map(string.strip, LineList[1].split(ValueSplitCharacter)))
Value = map(string.strip, LineList[1].split(ValueSplitCharacter))
else:
Value = CleanString(LineList[1], CommentCharacter).splitlines()
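
Both hunks above hinge on the same change: Python 3's map() returns a one-shot iterator rather than a list, so the migration wrapped it in list() wherever the result is indexed or reused (the restored string.strip function, from the string module, is itself Python 2 only; py3 keeps only the str.strip method). For example:

    Parts = map(str.strip, ' a , b '.split(','))   # py3: a lazy iterator
    assert list(Parts) == ['a', 'b']               # materialise to index or reuse
    assert list(Parts) == []                       # already consumed: one-shot
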
@ -612,7 +613,7 @@ def PreCheck(FileName, FileContent, SupSectionTag):
#
# Regenerate FileContent
#
NewFileContent = NewFileContent + Line + '\n'
NewFileContent = NewFileContent + Line + '\r\n'
if IsFailed:
EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=EdkLogger.IsRaiseError)
@ -750,7 +751,7 @@ def SplitString(String):
# @param StringList: A list for strings to be converted
#
def ConvertToSqlString(StringList):
return list(map(lambda s: s.replace("'", "''"), StringList))
return map(lambda s: s.replace("'", "''"), StringList)
## Convert To Sql String
#
@ -815,7 +816,11 @@ def GetHelpTextList(HelpTextClassList):
return List
def StringToArray(String):
if String.startswith('L"'):
if isinstance(String, unicode):
if len(unicode) == 0:
return "{0x00,0x00}"
return "{%s,0x00,0x00}" % ",".join("0x%02x,0x00" % ord(C) for C in String)
elif String.startswith('L"'):
if String == "L\"\"":
return "{0x00,0x00}"
else:
@ -838,7 +843,9 @@ def StringToArray(String):
return '{%s,0,0}' % ','.join(String.split())
def StringArrayLength(String):
if String.startswith('L"'):
if isinstance(String, unicode):
return (len(String) + 1) * 2 + 1;
elif String.startswith('L"'):
return (len(String) - 3 + 1) * 2
elif String.startswith('"'):
return (len(String) - 2 + 1)
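
The restored branches special-case Python 2's unicode type, which no longer exists on Python 3, where every str is already Unicode and the L"..." branch suffices. The array layout itself is unchanged: each character expands to a little-endian UCS-2 pair with a terminating NUL, for example:

    String = 'ab'
    Arr = "{%s,0x00,0x00}" % ",".join("0x%02x,0x00" % ord(C) for C in String)
    assert Arr == "{0x61,0x00,0x62,0x00,0x00,0x00}"   # 'a', 'b', then the NUL pair
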


@ -14,6 +14,8 @@
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import Common.LongFilePathOs as os
from . import EdkLogger
from . import DataType


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
import re
from . import EdkLogger


@ -15,6 +15,7 @@
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
from __future__ import print_function
import Common.LongFilePathOs as os
import re
import Common.EdkLogger as EdkLogger
@ -91,18 +92,18 @@ class VpdInfoFile:
if (Vpd is None):
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
if not (Offset >= "0" or Offset == "*"):
if not (Offset >= 0 or Offset == "*"):
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
if Vpd.DatumType == TAB_VOID:
if Vpd.MaxDatumSize <= "0":
if Vpd.MaxDatumSize <= 0:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
if not Vpd.MaxDatumSize:
Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType]
else:
if Vpd.MaxDatumSize <= "0":
if Vpd.MaxDatumSize <= 0:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
@ -126,7 +127,7 @@ class VpdInfoFile:
"Invalid parameter FilePath: %s." % FilePath)
Content = FILE_COMMENT_TEMPLATE
Pcds = sorted(self._VpdArray.keys(), key=lambda x: x.TokenCName)
Pcds = sorted(self._VpdArray.keys())
for Pcd in Pcds:
i = 0
PcdTokenCName = Pcd.TokenCName
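
sorted() over bare objects worked on Python 2 via an arbitrary fallback ordering; Python 3 raises TypeError unless a key is supplied, which is what key=lambda x: x.TokenCName provided. A sketch with a stand-in PCD class (hypothetical, for illustration):

    class Pcd:
        def __init__(self, Name):
            self.TokenCName = Name

    Pcds = [Pcd('PcdB'), Pcd('PcdA')]
    Ordered = sorted(Pcds, key=lambda x: x.TokenCName)   # key required on py3
    assert [p.TokenCName for p in Ordered] == ['PcdA', 'PcdB']
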
@ -248,7 +249,7 @@ def CallExtenalBPDGTool(ToolPath, VpdFileName):
except Exception as X:
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData=str(X))
(out, error) = PopenObject.communicate()
print(out.decode(encoding='utf-8', errors='ignore'))
print(out)
while PopenObject.returncode is None :
PopenObject.wait()
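
Popen.communicate() hands back bytes on Python 3, so printing BPDG's output needs an explicit decode; Python 2 returned str, and print(out) was enough. A self-contained sketch:

    import subprocess, sys
    Out, _ = subprocess.Popen([sys.executable, '-c', 'print("ok")'],
                              stdout=subprocess.PIPE).communicate()
    print(Out.decode(encoding='utf-8', errors='ignore').strip())   # -> ok
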


@ -1,636 +0,0 @@
/* @file
This file is used to be the grammar file of ECC tool
Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
This program and the accompanying materials
are licensed and made available under the terms and conditions of the BSD License
which accompanies this distribution. The full text of the license may be found at
http://opensource.org/licenses/bsd-license.php
THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
*/
grammar C;
options {
language=Python;
}
@header {
## @file
# The file defines the parser for C source files.
#
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at:
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
import Ecc.CodeFragment as CodeFragment
import Ecc.FileProfile as FileProfile
}
@members {
def printTokenInfo(self, line, offset, tokenText):
print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.PredicateExpressionList.append(PredExp)
def StoreEnumerationDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.EnumerationDefinitionList.append(EnumDef)
def StoreStructUnionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.StructUnionDefinitionList.append(SUDef)
def StoreTypedefDefinition(self, StartLine, StartOffset, EndLine, EndOffset, FromText, ToText):
Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.TypedefDefinitionList.append(Tdef)
def StoreFunctionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText, LeftBraceLine, LeftBraceOffset, DeclLine, DeclOffset):
FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
FileProfile.FunctionDefinitionList.append(FuncDef)
def StoreVariableDeclaration(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText):
VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.VariableDeclarationList.append(VarDecl)
def StoreFunctionCalling(self, StartLine, StartOffset, EndLine, EndOffset, FuncName, ParamList):
FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.FunctionCallingList.append(FuncCall)
}
translation_unit
: external_declaration*
;
external_declaration
: ( declaration_specifiers? declarator declaration* '{' )
| function_definition
| declaration
| macro_statement (';')?
;
function_definition
locals [String ModifierText = '', String DeclText = '', int LBLine = 0, int LBOffset = 0, int DeclLine = 0, int DeclOffset = 0]
@init {
ModifierText = '';
DeclText = '';
LBLine = 0;
LBOffset = 0;
DeclLine = 0;
DeclOffset = 0;
}
@after{
self.StoreFunctionDefinition(localctx.start.line, localctx.start.column, localctx.stop.line, localctx.stop.column, ModifierText, DeclText, LBLine, LBOffset, DeclLine, DeclOffset)
}
: d=declaration_specifiers? declarator
( declaration+ a=compound_statement // K&R style
| b=compound_statement // ANSI style
) {
if localctx.d != None:
ModifierText = $declaration_specifiers.text
else:
ModifierText = ''
DeclText = $declarator.text
DeclLine = $declarator.start.line
DeclOffset = $declarator.start.column
if localctx.a != None:
LBLine = $a.start.line
LBOffset = $a.start.column
else:
LBLine = $b.start.line
LBOffset = $b.start.column
}
;
declaration_specifiers
: ( storage_class_specifier
| type_specifier
| type_qualifier
)+
;
declaration
: a='typedef' b=declaration_specifiers? c=init_declarator_list d=';'
{
if localctx.b is not None:
self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, $d.line, localctx.d.column, $b.text, $c.text)
else:
self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, $d.line, localctx.d.column, '', $c.text)
}
| s=declaration_specifiers t=init_declarator_list? e=';'
{
if localctx.t is not None:
self.StoreVariableDeclaration($s.start.line, $s.start.column, $t.start.line, $t.start.column, $s.text, $t.text)
}
;
init_declarator_list
: init_declarator (',' init_declarator)*
;
init_declarator
: declarator ('=' initializer)?
;
storage_class_specifier
: 'extern'
| 'static'
| 'auto'
| 'register'
| 'STATIC'
;
type_specifier
: 'void'
| 'char'
| 'short'
| 'int'
| 'long'
| 'float'
| 'double'
| 'signed'
| 'unsigned'
| s=struct_or_union_specifier
{
if localctx.s.stop is not None:
self.StoreStructUnionDefinition($s.start.line, $s.start.column, $s.stop.line, $s.stop.column, $s.text)
}
| e=enum_specifier
{
if localctx.e.stop is not None:
self.StoreEnumerationDefinition($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)
}
| (IDENTIFIER type_qualifier* declarator)
| type_id
;
type_id
: IDENTIFIER
//{self.printTokenInfo($a.line, $a.pos, $a.text)}
;
struct_or_union_specifier
: struct_or_union IDENTIFIER? '{' struct_declaration_list '}'
| struct_or_union IDENTIFIER
;
struct_or_union
: 'struct'
| 'union'
;
struct_declaration_list
: struct_declaration+
;
struct_declaration
: specifier_qualifier_list struct_declarator_list ';'
;
specifier_qualifier_list
: ( type_qualifier | type_specifier )+
;
struct_declarator_list
: struct_declarator (',' struct_declarator)*
;
struct_declarator
: declarator (':' constant_expression)?
| ':' constant_expression
;
enum_specifier
: 'enum' '{' enumerator_list ','? '}'
| 'enum' IDENTIFIER '{' enumerator_list ','? '}'
| 'enum' IDENTIFIER
;
enumerator_list
: enumerator (',' enumerator)*
;
enumerator
: IDENTIFIER ('=' constant_expression)?
;
type_qualifier
: 'const'
| 'volatile'
| 'IN'
| 'OUT'
| 'OPTIONAL'
| 'CONST'
| 'UNALIGNED'
| 'VOLATILE'
| 'GLOBAL_REMOVE_IF_UNREFERENCED'
| 'EFIAPI'
| 'EFI_BOOTSERVICE'
| 'EFI_RUNTIMESERVICE'
| 'PACKED'
;
declarator
: pointer? ('EFIAPI')? ('EFI_BOOTSERVICE')? ('EFI_RUNTIMESERVICE')? direct_declarator
// | ('EFIAPI')? ('EFI_BOOTSERVICE')? ('EFI_RUNTIMESERVICE')? pointer? direct_declarator
| pointer
;
direct_declarator
: IDENTIFIER declarator_suffix*
| '(' ('EFIAPI')? declarator ')' declarator_suffix+
;
declarator_suffix
: '[' constant_expression ']'
| '[' ']'
| '(' parameter_type_list ')'
| '(' identifier_list ')'
| '(' ')'
;
pointer
: '*' type_qualifier+ pointer?
| '*' pointer
| '*'
;
parameter_type_list
: parameter_list (',' ('OPTIONAL')? '...')?
;
parameter_list
: parameter_declaration (',' ('OPTIONAL')? parameter_declaration)*
;
parameter_declaration
: declaration_specifiers (declarator|abstract_declarator)* ('OPTIONAL')?
//accommodate user-defined types only, no declarator follows.
| pointer* IDENTIFIER
;
identifier_list
: IDENTIFIER
(',' IDENTIFIER)*
;
type_name
: specifier_qualifier_list abstract_declarator?
| type_id
;
abstract_declarator
: pointer direct_abstract_declarator?
| direct_abstract_declarator
;
direct_abstract_declarator
: ( '(' abstract_declarator ')' | abstract_declarator_suffix ) abstract_declarator_suffix*
;
abstract_declarator_suffix
: '[' ']'
| '[' constant_expression ']'
| '(' ')'
| '(' parameter_type_list ')'
;
initializer
: assignment_expression
| '{' initializer_list ','? '}'
;
initializer_list
: initializer (',' initializer )*
;
// E x p r e s s i o n s
argument_expression_list
: assignment_expression ('OPTIONAL')? (',' assignment_expression ('OPTIONAL')?)*
;
additive_expression
: (multiplicative_expression) ('+' multiplicative_expression | '-' multiplicative_expression)*
;
multiplicative_expression
: (cast_expression) ('*' cast_expression | '/' cast_expression | '%' cast_expression)*
;
cast_expression
: '(' type_name ')' cast_expression
| unary_expression
;
unary_expression
: postfix_expression
| '++' unary_expression
| '--' unary_expression
| unary_operator cast_expression
| 'sizeof' unary_expression
| 'sizeof' '(' type_name ')'
;
postfix_expression
locals [FuncCallText='']
@init
{
self.FuncCallText=''
}
: p=primary_expression {self.FuncCallText += $p.text}
( '[' expression ']'
| '(' a=')'{self.StoreFunctionCalling($p.start.line, $p.start.column, $a.line, localctx.a.column, self.FuncCallText, '')}
| '(' c=argument_expression_list b=')' {self.StoreFunctionCalling($p.start.line, $p.start.column, $b.line, localctx.b.column, self.FuncCallText, $c.text)}
| '(' macro_parameter_list ')'
| '.' x=IDENTIFIER {self.FuncCallText += '.' + $x.text}
| '*' y=IDENTIFIER {self.FuncCallText = $y.text}
| '->' z=IDENTIFIER {self.FuncCallText += '->' + $z.text}
| '++'
| '--'
)*
;
macro_parameter_list
: parameter_declaration (',' parameter_declaration)*
;
unary_operator
: '&'
| '*'
| '+'
| '-'
| '~'
| '!'
;
primary_expression
: IDENTIFIER
| constant
| '(' expression ')'
;
constant
: HEX_LITERAL
| OCTAL_LITERAL
| DECIMAL_LITERAL
| CHARACTER_LITERAL
| (IDENTIFIER* STRING_LITERAL+)+ IDENTIFIER*
| FLOATING_POINT_LITERAL
;
/////
expression
: assignment_expression (',' assignment_expression)*
;
constant_expression
: conditional_expression
;
assignment_expression
: lvalue assignment_operator assignment_expression
| conditional_expression
;
lvalue
: unary_expression
;
assignment_operator
: '='
| '*='
| '/='
| '%='
| '+='
| '-='
| '<<='
| '>>='
| '&='
| '^='
| '|='
;
conditional_expression
: e=logical_or_expression ('?' expression ':' conditional_expression {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)})?
;
logical_or_expression
: logical_and_expression ('||' logical_and_expression)*
;
logical_and_expression
: inclusive_or_expression ('&&' inclusive_or_expression)*
;
inclusive_or_expression
: exclusive_or_expression ('|' exclusive_or_expression)*
;
exclusive_or_expression
: and_expression ('^' and_expression)*
;
and_expression
: equality_expression ('&' equality_expression)*
;
equality_expression
: relational_expression (('=='|'!=') relational_expression )*
;
relational_expression
: shift_expression (('<'|'>'|'<='|'>=') shift_expression)*
;
shift_expression
: additive_expression (('<<'|'>>') additive_expression)*
;
// S t a t e m e n t s
statement
: labeled_statement
| compound_statement
| expression_statement
| selection_statement
| iteration_statement
| jump_statement
| macro_statement
| asm2_statement
| asm1_statement
| asm_statement
| declaration
;
asm2_statement
: '__asm__'? IDENTIFIER '(' (~(';'))* ')' ';'
;
asm1_statement
: '_asm' '{' (~('}'))* '}'
;
asm_statement
: '__asm' '{' (~('}'))* '}'
;
macro_statement
: IDENTIFIER '(' declaration* statement_list? expression? ')'
;
labeled_statement
: IDENTIFIER ':' statement
| 'case' constant_expression ':' statement
| 'default' ':' statement
;
compound_statement
: '{' declaration* statement_list? '}'
;
statement_list
: statement+
;
expression_statement
: ';'
| expression ';'
;
selection_statement
: 'if' '(' e=expression ')' {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)} statement ('else' statement)?
| 'switch' '(' expression ')' statement
;
iteration_statement
: 'while' '(' e=expression ')' statement {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)}
| 'do' statement 'while' '(' e=expression ')' ';' {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)}
//| 'for' '(' expression_statement e=expression_statement expression? ')' statement {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)}
;
jump_statement
: 'goto' IDENTIFIER ';'
| 'continue' ';'
| 'break' ';'
| 'return' ';'
| 'return' expression ';'
;
IDENTIFIER
: LETTER (LETTER|'0'..'9')*
;
fragment
LETTER
: '$'
| 'A'..'Z'
| 'a'..'z'
| '_'
;
CHARACTER_LITERAL
: ('L')? '\'' ( EscapeSequence | ~('\''|'\\') ) '\''
;
STRING_LITERAL
: ('L')? '"' ( EscapeSequence | ~('\\'|'"') )* '"'
;
HEX_LITERAL : '0' ('x'|'X') HexDigit+ IntegerTypeSuffix? ;
DECIMAL_LITERAL : ('0' | '1'..'9' '0'..'9'*) IntegerTypeSuffix? ;
OCTAL_LITERAL : '0' ('0'..'7')+ IntegerTypeSuffix? ;
fragment
HexDigit : ('0'..'9'|'a'..'f'|'A'..'F') ;
fragment
IntegerTypeSuffix
: ('u'|'U')
| ('l'|'L')
| ('u'|'U') ('l'|'L')
| ('u'|'U') ('l'|'L') ('l'|'L')
;
FLOATING_POINT_LITERAL
: ('0'..'9')+ '.' ('0'..'9')* Exponent? FloatTypeSuffix?
| '.' ('0'..'9')+ Exponent? FloatTypeSuffix?
| ('0'..'9')+ Exponent FloatTypeSuffix?
| ('0'..'9')+ Exponent? FloatTypeSuffix
;
fragment
Exponent : ('e'|'E') ('+'|'-')? ('0'..'9')+ ;
fragment
FloatTypeSuffix : ('f'|'F'|'d'|'D') ;
fragment
EscapeSequence
: '\\' ('b'|'t'|'n'|'f'|'r'|'\''|'\\')
| OctalEscape
;
fragment
OctalEscape
: '\\' ('0'..'3') ('0'..'7') ('0'..'7')
| '\\' ('0'..'7') ('0'..'7')
| '\\' ('0'..'7')
;
fragment
UnicodeEscape
: '\\' 'u' HexDigit HexDigit HexDigit HexDigit
;
WS : (' '|'\r'|'\t'|'\u000C'|'\n')
-> channel(HIDDEN)
;
// ignore '\' of line concatenation
BS : ('\\')
-> channel(HIDDEN)
;
UnicodeVocabulary
: '\u0003'..'\uFFFE'
;
COMMENT
: '/*' .*? '*/'
-> channel(HIDDEN)
;
LINE_COMMENT
: '//' ~('\n'|'\r')* '\r'? '\n'
-> channel(HIDDEN)
;
// ignore #line info for now
LINE_COMMAND
: '#' ~('\n'|'\r')* '\r'? '\n'
-> channel(HIDDEN)
;

File diff suppressed because it is too large.


@ -1,672 +0,0 @@
# Generated from C.g4 by ANTLR 4.7.1
from antlr4 import *
if __name__ is not None and "." in __name__:
from .CParser import CParser
else:
from CParser import CParser
## @file
# The file defines the parser for C source files.
#
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at:
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
import Ecc.CodeFragment as CodeFragment
import Ecc.FileProfile as FileProfile
# This class defines a complete listener for a parse tree produced by CParser.
class CListener(ParseTreeListener):
# Enter a parse tree produced by CParser#translation_unit.
def enterTranslation_unit(self, ctx:CParser.Translation_unitContext):
pass
# Exit a parse tree produced by CParser#translation_unit.
def exitTranslation_unit(self, ctx:CParser.Translation_unitContext):
pass
# Enter a parse tree produced by CParser#external_declaration.
def enterExternal_declaration(self, ctx:CParser.External_declarationContext):
pass
# Exit a parse tree produced by CParser#external_declaration.
def exitExternal_declaration(self, ctx:CParser.External_declarationContext):
pass
# Enter a parse tree produced by CParser#function_definition.
def enterFunction_definition(self, ctx:CParser.Function_definitionContext):
pass
# Exit a parse tree produced by CParser#function_definition.
def exitFunction_definition(self, ctx:CParser.Function_definitionContext):
pass
# Enter a parse tree produced by CParser#declaration_specifiers.
def enterDeclaration_specifiers(self, ctx:CParser.Declaration_specifiersContext):
pass
# Exit a parse tree produced by CParser#declaration_specifiers.
def exitDeclaration_specifiers(self, ctx:CParser.Declaration_specifiersContext):
pass
# Enter a parse tree produced by CParser#declaration.
def enterDeclaration(self, ctx:CParser.DeclarationContext):
pass
# Exit a parse tree produced by CParser#declaration.
def exitDeclaration(self, ctx:CParser.DeclarationContext):
pass
# Enter a parse tree produced by CParser#init_declarator_list.
def enterInit_declarator_list(self, ctx:CParser.Init_declarator_listContext):
pass
# Exit a parse tree produced by CParser#init_declarator_list.
def exitInit_declarator_list(self, ctx:CParser.Init_declarator_listContext):
pass
# Enter a parse tree produced by CParser#init_declarator.
def enterInit_declarator(self, ctx:CParser.Init_declaratorContext):
pass
# Exit a parse tree produced by CParser#init_declarator.
def exitInit_declarator(self, ctx:CParser.Init_declaratorContext):
pass
# Enter a parse tree produced by CParser#storage_class_specifier.
def enterStorage_class_specifier(self, ctx:CParser.Storage_class_specifierContext):
pass
# Exit a parse tree produced by CParser#storage_class_specifier.
def exitStorage_class_specifier(self, ctx:CParser.Storage_class_specifierContext):
pass
# Enter a parse tree produced by CParser#type_specifier.
def enterType_specifier(self, ctx:CParser.Type_specifierContext):
pass
# Exit a parse tree produced by CParser#type_specifier.
def exitType_specifier(self, ctx:CParser.Type_specifierContext):
pass
# Enter a parse tree produced by CParser#type_id.
def enterType_id(self, ctx:CParser.Type_idContext):
pass
# Exit a parse tree produced by CParser#type_id.
def exitType_id(self, ctx:CParser.Type_idContext):
pass
# Enter a parse tree produced by CParser#struct_or_union_specifier.
def enterStruct_or_union_specifier(self, ctx:CParser.Struct_or_union_specifierContext):
pass
# Exit a parse tree produced by CParser#struct_or_union_specifier.
def exitStruct_or_union_specifier(self, ctx:CParser.Struct_or_union_specifierContext):
pass
# Enter a parse tree produced by CParser#struct_or_union.
def enterStruct_or_union(self, ctx:CParser.Struct_or_unionContext):
pass
# Exit a parse tree produced by CParser#struct_or_union.
def exitStruct_or_union(self, ctx:CParser.Struct_or_unionContext):
pass
# Enter a parse tree produced by CParser#struct_declaration_list.
def enterStruct_declaration_list(self, ctx:CParser.Struct_declaration_listContext):
pass
# Exit a parse tree produced by CParser#struct_declaration_list.
def exitStruct_declaration_list(self, ctx:CParser.Struct_declaration_listContext):
pass
# Enter a parse tree produced by CParser#struct_declaration.
def enterStruct_declaration(self, ctx:CParser.Struct_declarationContext):
pass
# Exit a parse tree produced by CParser#struct_declaration.
def exitStruct_declaration(self, ctx:CParser.Struct_declarationContext):
pass
# Enter a parse tree produced by CParser#specifier_qualifier_list.
def enterSpecifier_qualifier_list(self, ctx:CParser.Specifier_qualifier_listContext):
pass
# Exit a parse tree produced by CParser#specifier_qualifier_list.
def exitSpecifier_qualifier_list(self, ctx:CParser.Specifier_qualifier_listContext):
pass
# Enter a parse tree produced by CParser#struct_declarator_list.
def enterStruct_declarator_list(self, ctx:CParser.Struct_declarator_listContext):
pass
# Exit a parse tree produced by CParser#struct_declarator_list.
def exitStruct_declarator_list(self, ctx:CParser.Struct_declarator_listContext):
pass
# Enter a parse tree produced by CParser#struct_declarator.
def enterStruct_declarator(self, ctx:CParser.Struct_declaratorContext):
pass
# Exit a parse tree produced by CParser#struct_declarator.
def exitStruct_declarator(self, ctx:CParser.Struct_declaratorContext):
pass
# Enter a parse tree produced by CParser#enum_specifier.
def enterEnum_specifier(self, ctx:CParser.Enum_specifierContext):
pass
# Exit a parse tree produced by CParser#enum_specifier.
def exitEnum_specifier(self, ctx:CParser.Enum_specifierContext):
pass
# Enter a parse tree produced by CParser#enumerator_list.
def enterEnumerator_list(self, ctx:CParser.Enumerator_listContext):
pass
# Exit a parse tree produced by CParser#enumerator_list.
def exitEnumerator_list(self, ctx:CParser.Enumerator_listContext):
pass
# Enter a parse tree produced by CParser#enumerator.
def enterEnumerator(self, ctx:CParser.EnumeratorContext):
pass
# Exit a parse tree produced by CParser#enumerator.
def exitEnumerator(self, ctx:CParser.EnumeratorContext):
pass
# Enter a parse tree produced by CParser#type_qualifier.
def enterType_qualifier(self, ctx:CParser.Type_qualifierContext):
pass
# Exit a parse tree produced by CParser#type_qualifier.
def exitType_qualifier(self, ctx:CParser.Type_qualifierContext):
pass
# Enter a parse tree produced by CParser#declarator.
def enterDeclarator(self, ctx:CParser.DeclaratorContext):
pass
# Exit a parse tree produced by CParser#declarator.
def exitDeclarator(self, ctx:CParser.DeclaratorContext):
pass
# Enter a parse tree produced by CParser#direct_declarator.
def enterDirect_declarator(self, ctx:CParser.Direct_declaratorContext):
pass
# Exit a parse tree produced by CParser#direct_declarator.
def exitDirect_declarator(self, ctx:CParser.Direct_declaratorContext):
pass
# Enter a parse tree produced by CParser#declarator_suffix.
def enterDeclarator_suffix(self, ctx:CParser.Declarator_suffixContext):
pass
# Exit a parse tree produced by CParser#declarator_suffix.
def exitDeclarator_suffix(self, ctx:CParser.Declarator_suffixContext):
pass
# Enter a parse tree produced by CParser#pointer.
def enterPointer(self, ctx:CParser.PointerContext):
pass
# Exit a parse tree produced by CParser#pointer.
def exitPointer(self, ctx:CParser.PointerContext):
pass
# Enter a parse tree produced by CParser#parameter_type_list.
def enterParameter_type_list(self, ctx:CParser.Parameter_type_listContext):
pass
# Exit a parse tree produced by CParser#parameter_type_list.
def exitParameter_type_list(self, ctx:CParser.Parameter_type_listContext):
pass
# Enter a parse tree produced by CParser#parameter_list.
def enterParameter_list(self, ctx:CParser.Parameter_listContext):
pass
# Exit a parse tree produced by CParser#parameter_list.
def exitParameter_list(self, ctx:CParser.Parameter_listContext):
pass
# Enter a parse tree produced by CParser#parameter_declaration.
def enterParameter_declaration(self, ctx:CParser.Parameter_declarationContext):
pass
# Exit a parse tree produced by CParser#parameter_declaration.
def exitParameter_declaration(self, ctx:CParser.Parameter_declarationContext):
pass
# Enter a parse tree produced by CParser#identifier_list.
def enterIdentifier_list(self, ctx:CParser.Identifier_listContext):
pass
# Exit a parse tree produced by CParser#identifier_list.
def exitIdentifier_list(self, ctx:CParser.Identifier_listContext):
pass
# Enter a parse tree produced by CParser#type_name.
def enterType_name(self, ctx:CParser.Type_nameContext):
pass
# Exit a parse tree produced by CParser#type_name.
def exitType_name(self, ctx:CParser.Type_nameContext):
pass
# Enter a parse tree produced by CParser#abstract_declarator.
def enterAbstract_declarator(self, ctx:CParser.Abstract_declaratorContext):
pass
# Exit a parse tree produced by CParser#abstract_declarator.
def exitAbstract_declarator(self, ctx:CParser.Abstract_declaratorContext):
pass
# Enter a parse tree produced by CParser#direct_abstract_declarator.
def enterDirect_abstract_declarator(self, ctx:CParser.Direct_abstract_declaratorContext):
pass
# Exit a parse tree produced by CParser#direct_abstract_declarator.
def exitDirect_abstract_declarator(self, ctx:CParser.Direct_abstract_declaratorContext):
pass
# Enter a parse tree produced by CParser#abstract_declarator_suffix.
def enterAbstract_declarator_suffix(self, ctx:CParser.Abstract_declarator_suffixContext):
pass
# Exit a parse tree produced by CParser#abstract_declarator_suffix.
def exitAbstract_declarator_suffix(self, ctx:CParser.Abstract_declarator_suffixContext):
pass
# Enter a parse tree produced by CParser#initializer.
def enterInitializer(self, ctx:CParser.InitializerContext):
pass
# Exit a parse tree produced by CParser#initializer.
def exitInitializer(self, ctx:CParser.InitializerContext):
pass
# Enter a parse tree produced by CParser#initializer_list.
def enterInitializer_list(self, ctx:CParser.Initializer_listContext):
pass
# Exit a parse tree produced by CParser#initializer_list.
def exitInitializer_list(self, ctx:CParser.Initializer_listContext):
pass
# Enter a parse tree produced by CParser#argument_expression_list.
def enterArgument_expression_list(self, ctx:CParser.Argument_expression_listContext):
pass
# Exit a parse tree produced by CParser#argument_expression_list.
def exitArgument_expression_list(self, ctx:CParser.Argument_expression_listContext):
pass
# Enter a parse tree produced by CParser#additive_expression.
def enterAdditive_expression(self, ctx:CParser.Additive_expressionContext):
pass
# Exit a parse tree produced by CParser#additive_expression.
def exitAdditive_expression(self, ctx:CParser.Additive_expressionContext):
pass
# Enter a parse tree produced by CParser#multiplicative_expression.
def enterMultiplicative_expression(self, ctx:CParser.Multiplicative_expressionContext):
pass
# Exit a parse tree produced by CParser#multiplicative_expression.
def exitMultiplicative_expression(self, ctx:CParser.Multiplicative_expressionContext):
pass
# Enter a parse tree produced by CParser#cast_expression.
def enterCast_expression(self, ctx:CParser.Cast_expressionContext):
pass
# Exit a parse tree produced by CParser#cast_expression.
def exitCast_expression(self, ctx:CParser.Cast_expressionContext):
pass
# Enter a parse tree produced by CParser#unary_expression.
def enterUnary_expression(self, ctx:CParser.Unary_expressionContext):
pass
# Exit a parse tree produced by CParser#unary_expression.
def exitUnary_expression(self, ctx:CParser.Unary_expressionContext):
pass
# Enter a parse tree produced by CParser#postfix_expression.
def enterPostfix_expression(self, ctx:CParser.Postfix_expressionContext):
pass
# Exit a parse tree produced by CParser#postfix_expression.
def exitPostfix_expression(self, ctx:CParser.Postfix_expressionContext):
pass
# Enter a parse tree produced by CParser#macro_parameter_list.
def enterMacro_parameter_list(self, ctx:CParser.Macro_parameter_listContext):
pass
# Exit a parse tree produced by CParser#macro_parameter_list.
def exitMacro_parameter_list(self, ctx:CParser.Macro_parameter_listContext):
pass
# Enter a parse tree produced by CParser#unary_operator.
def enterUnary_operator(self, ctx:CParser.Unary_operatorContext):
pass
# Exit a parse tree produced by CParser#unary_operator.
def exitUnary_operator(self, ctx:CParser.Unary_operatorContext):
pass
# Enter a parse tree produced by CParser#primary_expression.
def enterPrimary_expression(self, ctx:CParser.Primary_expressionContext):
pass
# Exit a parse tree produced by CParser#primary_expression.
def exitPrimary_expression(self, ctx:CParser.Primary_expressionContext):
pass
# Enter a parse tree produced by CParser#constant.
def enterConstant(self, ctx:CParser.ConstantContext):
pass
# Exit a parse tree produced by CParser#constant.
def exitConstant(self, ctx:CParser.ConstantContext):
pass
# Enter a parse tree produced by CParser#expression.
def enterExpression(self, ctx:CParser.ExpressionContext):
pass
# Exit a parse tree produced by CParser#expression.
def exitExpression(self, ctx:CParser.ExpressionContext):
pass
# Enter a parse tree produced by CParser#constant_expression.
def enterConstant_expression(self, ctx:CParser.Constant_expressionContext):
pass
# Exit a parse tree produced by CParser#constant_expression.
def exitConstant_expression(self, ctx:CParser.Constant_expressionContext):
pass
# Enter a parse tree produced by CParser#assignment_expression.
def enterAssignment_expression(self, ctx:CParser.Assignment_expressionContext):
pass
# Exit a parse tree produced by CParser#assignment_expression.
def exitAssignment_expression(self, ctx:CParser.Assignment_expressionContext):
pass
# Enter a parse tree produced by CParser#lvalue.
def enterLvalue(self, ctx:CParser.LvalueContext):
pass
# Exit a parse tree produced by CParser#lvalue.
def exitLvalue(self, ctx:CParser.LvalueContext):
pass
# Enter a parse tree produced by CParser#assignment_operator.
def enterAssignment_operator(self, ctx:CParser.Assignment_operatorContext):
pass
# Exit a parse tree produced by CParser#assignment_operator.
def exitAssignment_operator(self, ctx:CParser.Assignment_operatorContext):
pass
# Enter a parse tree produced by CParser#conditional_expression.
def enterConditional_expression(self, ctx:CParser.Conditional_expressionContext):
pass
# Exit a parse tree produced by CParser#conditional_expression.
def exitConditional_expression(self, ctx:CParser.Conditional_expressionContext):
pass
# Enter a parse tree produced by CParser#logical_or_expression.
def enterLogical_or_expression(self, ctx:CParser.Logical_or_expressionContext):
pass
# Exit a parse tree produced by CParser#logical_or_expression.
def exitLogical_or_expression(self, ctx:CParser.Logical_or_expressionContext):
pass
# Enter a parse tree produced by CParser#logical_and_expression.
def enterLogical_and_expression(self, ctx:CParser.Logical_and_expressionContext):
pass
# Exit a parse tree produced by CParser#logical_and_expression.
def exitLogical_and_expression(self, ctx:CParser.Logical_and_expressionContext):
pass
# Enter a parse tree produced by CParser#inclusive_or_expression.
def enterInclusive_or_expression(self, ctx:CParser.Inclusive_or_expressionContext):
pass
# Exit a parse tree produced by CParser#inclusive_or_expression.
def exitInclusive_or_expression(self, ctx:CParser.Inclusive_or_expressionContext):
pass
# Enter a parse tree produced by CParser#exclusive_or_expression.
def enterExclusive_or_expression(self, ctx:CParser.Exclusive_or_expressionContext):
pass
# Exit a parse tree produced by CParser#exclusive_or_expression.
def exitExclusive_or_expression(self, ctx:CParser.Exclusive_or_expressionContext):
pass
# Enter a parse tree produced by CParser#and_expression.
def enterAnd_expression(self, ctx:CParser.And_expressionContext):
pass
# Exit a parse tree produced by CParser#and_expression.
def exitAnd_expression(self, ctx:CParser.And_expressionContext):
pass
# Enter a parse tree produced by CParser#equality_expression.
def enterEquality_expression(self, ctx:CParser.Equality_expressionContext):
pass
# Exit a parse tree produced by CParser#equality_expression.
def exitEquality_expression(self, ctx:CParser.Equality_expressionContext):
pass
# Enter a parse tree produced by CParser#relational_expression.
def enterRelational_expression(self, ctx:CParser.Relational_expressionContext):
pass
# Exit a parse tree produced by CParser#relational_expression.
def exitRelational_expression(self, ctx:CParser.Relational_expressionContext):
pass
# Enter a parse tree produced by CParser#shift_expression.
def enterShift_expression(self, ctx:CParser.Shift_expressionContext):
pass
# Exit a parse tree produced by CParser#shift_expression.
def exitShift_expression(self, ctx:CParser.Shift_expressionContext):
pass
# Enter a parse tree produced by CParser#statement.
def enterStatement(self, ctx:CParser.StatementContext):
pass
# Exit a parse tree produced by CParser#statement.
def exitStatement(self, ctx:CParser.StatementContext):
pass
# Enter a parse tree produced by CParser#asm2_statement.
def enterAsm2_statement(self, ctx:CParser.Asm2_statementContext):
pass
# Exit a parse tree produced by CParser#asm2_statement.
def exitAsm2_statement(self, ctx:CParser.Asm2_statementContext):
pass
# Enter a parse tree produced by CParser#asm1_statement.
def enterAsm1_statement(self, ctx:CParser.Asm1_statementContext):
pass
# Exit a parse tree produced by CParser#asm1_statement.
def exitAsm1_statement(self, ctx:CParser.Asm1_statementContext):
pass
# Enter a parse tree produced by CParser#asm_statement.
def enterAsm_statement(self, ctx:CParser.Asm_statementContext):
pass
# Exit a parse tree produced by CParser#asm_statement.
def exitAsm_statement(self, ctx:CParser.Asm_statementContext):
pass
# Enter a parse tree produced by CParser#macro_statement.
def enterMacro_statement(self, ctx:CParser.Macro_statementContext):
pass
# Exit a parse tree produced by CParser#macro_statement.
def exitMacro_statement(self, ctx:CParser.Macro_statementContext):
pass
# Enter a parse tree produced by CParser#labeled_statement.
def enterLabeled_statement(self, ctx:CParser.Labeled_statementContext):
pass
# Exit a parse tree produced by CParser#labeled_statement.
def exitLabeled_statement(self, ctx:CParser.Labeled_statementContext):
pass
# Enter a parse tree produced by CParser#compound_statement.
def enterCompound_statement(self, ctx:CParser.Compound_statementContext):
pass
# Exit a parse tree produced by CParser#compound_statement.
def exitCompound_statement(self, ctx:CParser.Compound_statementContext):
pass
# Enter a parse tree produced by CParser#statement_list.
def enterStatement_list(self, ctx:CParser.Statement_listContext):
pass
# Exit a parse tree produced by CParser#statement_list.
def exitStatement_list(self, ctx:CParser.Statement_listContext):
pass
# Enter a parse tree produced by CParser#expression_statement.
def enterExpression_statement(self, ctx:CParser.Expression_statementContext):
pass
# Exit a parse tree produced by CParser#expression_statement.
def exitExpression_statement(self, ctx:CParser.Expression_statementContext):
pass
# Enter a parse tree produced by CParser#selection_statement.
def enterSelection_statement(self, ctx:CParser.Selection_statementContext):
pass
# Exit a parse tree produced by CParser#selection_statement.
def exitSelection_statement(self, ctx:CParser.Selection_statementContext):
pass
# Enter a parse tree produced by CParser#iteration_statement.
def enterIteration_statement(self, ctx:CParser.Iteration_statementContext):
pass
# Exit a parse tree produced by CParser#iteration_statement.
def exitIteration_statement(self, ctx:CParser.Iteration_statementContext):
pass
# Enter a parse tree produced by CParser#jump_statement.
def enterJump_statement(self, ctx:CParser.Jump_statementContext):
pass
# Exit a parse tree produced by CParser#jump_statement.
def exitJump_statement(self, ctx:CParser.Jump_statementContext):
pass

File diff suppressed because it is too large.


@ -223,7 +223,7 @@ class Check(object):
IndexOfLine = 0
for Line in op:
IndexOfLine += 1
if not bytes.decode(Line).endswith('\r\n'):
if not Line.endswith('\r\n'):
OtherMsg = "File %s has invalid line ending at line %s" % (Record[1], IndexOfLine)
EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_INVALID_LINE_ENDING, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
@ -235,7 +235,7 @@ class Check(object):
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
op = open(Record[1], 'r').readlines()
op = open(Record[1], 'rb').readlines()
IndexOfLine = 0
for Line in op:
IndexOfLine += 1
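
The line-ending audit only sees '\r\n' when the file is read in binary mode: text mode on Python 3 applies universal-newline translation, so CRLF never reaches the caller, and binary reads yield bytes that must be decoded before string comparisons. A sketch (Python 3), against a made-up scratch file:

    FileName = 'sample.txt'                      # hypothetical path for illustration
    with open(FileName, 'wb') as f:
        f.write(b'good line\r\nbad line\n')
    for No, Line in enumerate(open(FileName, 'rb').readlines(), 1):
        if not Line.decode().endswith('\r\n'):
            print('invalid line ending at line %d' % No)   # flags line 2
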


@ -22,7 +22,7 @@ import re
import Common.LongFilePathOs as os
import sys
import antlr4
import antlr3
from Ecc.CLexer import CLexer
from Ecc.CParser import CParser
@ -499,14 +499,13 @@ class CodeFragmentCollector:
def ParseFile(self):
self.PreprocessFile()
# restore from ListOfList to ListOfString
# print(self.Profile.FileLinesList)
self.Profile.FileLinesList = ["".join(list) for list in self.Profile.FileLinesList]
FileStringContents = ''
for fileLine in self.Profile.FileLinesList:
FileStringContents += fileLine
cStream = antlr4.InputStream(FileStringContents)
cStream = antlr3.StringStream(FileStringContents)
lexer = CLexer(cStream)
tStream = antlr4.CommonTokenStream(lexer)
tStream = antlr3.CommonTokenStream(lexer)
parser = CParser(tStream)
parser.translation_unit()
@ -517,9 +516,9 @@ class CodeFragmentCollector:
FileStringContents = ''
for fileLine in self.Profile.FileLinesList:
FileStringContents += fileLine
cStream = antlr4.InputStream(FileStringContents)
cStream = antlr3.StringStream(FileStringContents)
lexer = CLexer(cStream)
tStream = antlr4.CommonTokenStream(lexer)
tStream = antlr3.CommonTokenStream(lexer)
parser = CParser(tStream)
parser.translation_unit()
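
This swap tracks the runtime change: the regenerated ECC parser targets the ANTLR 3 Python runtime, whose input class is spelled antlr3.StringStream, while ANTLR 4 uses antlr4.InputStream (CommonTokenStream keeps its name in both). A fragment assuming the antlr4-python3-runtime package is installed:

    import antlr4
    cStream = antlr4.InputStream('int x;')        # ANTLR 4 spelling
    # ANTLR 3 equivalent: cStream = antlr3.StringStream('int x;')
    # either stream then feeds the generated lexer, e.g. CLexer(cStream)
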


@ -205,7 +205,7 @@ class Ecc(object):
Op = open(EccGlobalData.gConfig.MetaDataFileCheckPathOfGenerateFileList, 'w+')
#SkipDirs = Read from config file
SkipDirs = EccGlobalData.gConfig.SkipDirList
SkipDirString = '|'.join(SkipDirs)
SkipDirString = string.join(SkipDirs, '|')
# p = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % SkipDirString)
p = re.compile(r'.*[\\/](?:%s^\S)[\\/]?.*' % SkipDirString)
for scanFolder in ScanFolders:
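
string.join() is a Python 2-only function; Python 3 dropped the string-module duplicates in favour of the str methods, so '|'.join(...) is the portable spelling:

    SkipDirs = ['.svn', 'CVS', 'Bin']             # made-up list for illustration
    assert '|'.join(SkipDirs) == '.svn|CVS|Bin'   # str method: works on 2 and 3
    # string.join(SkipDirs, '|') exists only on Python 2
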


@ -47,7 +47,7 @@ class FileProfile :
self.FileLinesList = []
self.FileLinesListFromFile = []
try:
fsock = open(FileName, "r")
fsock = open(FileName, "rb", 0)
try:
self.FileLinesListFromFile = fsock.readlines()
finally:


@ -113,7 +113,7 @@ def ParseHeaderCommentSection(CommentList, FileName = None):
#
Last = 0
HeaderCommentStage = HEADER_COMMENT_NOT_STARTED
for Index in range(len(CommentList) - 1, 0, -1):
for Index in xrange(len(CommentList)-1, 0, -1):
Line = CommentList[Index][0]
if _IsCopyrightLine(Line):
Last = Index


@ -35,7 +35,7 @@ IgnoredKeywordList = ['EFI_ERROR']
def GetIgnoredDirListPattern():
skipList = list(EccGlobalData.gConfig.SkipDirList) + ['.svn']
DirString = '|'.join(skipList)
DirString = string.join(skipList, '|')
p = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % DirString)
return p
@ -963,7 +963,7 @@ def StripComments(Str):
ListFromStr[Index] = ' '
Index += 1
# check for // comment
elif ListFromStr[Index] == '/' and ListFromStr[Index + 1] == '/':
elif ListFromStr[Index] == '/' and ListFromStr[Index + 1] == '/' and ListFromStr[Index + 2] != '\n':
InComment = True
DoubleSlashComment = True
@ -1297,7 +1297,7 @@ def CheckFuncLayoutReturnType(FullFileName):
Result0 = Result[0]
if Result0.upper().startswith('STATIC'):
Result0 = Result0[6:].strip()
Index = Result0.find(TypeStart)
Index = Result0.find(ReturnType)
if Index != 0 or Result[3] != 0:
PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear at the start of line' % FuncName, 'Function', Result[1])

File diff suppressed because it is too large.


@ -1,672 +0,0 @@
# Generated from C.g4 by ANTLR 4.7.1
from antlr4 import *
if __name__ is not None and "." in __name__:
from .CParser import CParser
else:
from CParser import CParser
## @file
# The file defines the parser for C source files.
#
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at:
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
import Ecc.CodeFragment as CodeFragment
import Ecc.FileProfile as FileProfile
# This class defines a complete listener for a parse tree produced by CParser.
class CListener(ParseTreeListener):
# Enter a parse tree produced by CParser#translation_unit.
def enterTranslation_unit(self, ctx:CParser.Translation_unitContext):
pass
# Exit a parse tree produced by CParser#translation_unit.
def exitTranslation_unit(self, ctx:CParser.Translation_unitContext):
pass
# Enter a parse tree produced by CParser#external_declaration.
def enterExternal_declaration(self, ctx:CParser.External_declarationContext):
pass
# Exit a parse tree produced by CParser#external_declaration.
def exitExternal_declaration(self, ctx:CParser.External_declarationContext):
pass
# Enter a parse tree produced by CParser#function_definition.
def enterFunction_definition(self, ctx:CParser.Function_definitionContext):
pass
# Exit a parse tree produced by CParser#function_definition.
def exitFunction_definition(self, ctx:CParser.Function_definitionContext):
pass
# Enter a parse tree produced by CParser#declaration_specifiers.
def enterDeclaration_specifiers(self, ctx:CParser.Declaration_specifiersContext):
pass
# Exit a parse tree produced by CParser#declaration_specifiers.
def exitDeclaration_specifiers(self, ctx:CParser.Declaration_specifiersContext):
pass
# Enter a parse tree produced by CParser#declaration.
def enterDeclaration(self, ctx:CParser.DeclarationContext):
pass
# Exit a parse tree produced by CParser#declaration.
def exitDeclaration(self, ctx:CParser.DeclarationContext):
pass
# Enter a parse tree produced by CParser#init_declarator_list.
def enterInit_declarator_list(self, ctx:CParser.Init_declarator_listContext):
pass
# Exit a parse tree produced by CParser#init_declarator_list.
def exitInit_declarator_list(self, ctx:CParser.Init_declarator_listContext):
pass
# Enter a parse tree produced by CParser#init_declarator.
def enterInit_declarator(self, ctx:CParser.Init_declaratorContext):
pass
# Exit a parse tree produced by CParser#init_declarator.
def exitInit_declarator(self, ctx:CParser.Init_declaratorContext):
pass
# Enter a parse tree produced by CParser#storage_class_specifier.
def enterStorage_class_specifier(self, ctx:CParser.Storage_class_specifierContext):
pass
# Exit a parse tree produced by CParser#storage_class_specifier.
def exitStorage_class_specifier(self, ctx:CParser.Storage_class_specifierContext):
pass
# Enter a parse tree produced by CParser#type_specifier.
def enterType_specifier(self, ctx:CParser.Type_specifierContext):
pass
# Exit a parse tree produced by CParser#type_specifier.
def exitType_specifier(self, ctx:CParser.Type_specifierContext):
pass
# Enter a parse tree produced by CParser#type_id.
def enterType_id(self, ctx:CParser.Type_idContext):
pass
# Exit a parse tree produced by CParser#type_id.
def exitType_id(self, ctx:CParser.Type_idContext):
pass
# Enter a parse tree produced by CParser#struct_or_union_specifier.
def enterStruct_or_union_specifier(self, ctx:CParser.Struct_or_union_specifierContext):
pass
# Exit a parse tree produced by CParser#struct_or_union_specifier.
def exitStruct_or_union_specifier(self, ctx:CParser.Struct_or_union_specifierContext):
pass
# Enter a parse tree produced by CParser#struct_or_union.
def enterStruct_or_union(self, ctx:CParser.Struct_or_unionContext):
pass
# Exit a parse tree produced by CParser#struct_or_union.
def exitStruct_or_union(self, ctx:CParser.Struct_or_unionContext):
pass
# Enter a parse tree produced by CParser#struct_declaration_list.
def enterStruct_declaration_list(self, ctx:CParser.Struct_declaration_listContext):
pass
# Exit a parse tree produced by CParser#struct_declaration_list.
def exitStruct_declaration_list(self, ctx:CParser.Struct_declaration_listContext):
pass
# Enter a parse tree produced by CParser#struct_declaration.
def enterStruct_declaration(self, ctx:CParser.Struct_declarationContext):
pass
# Exit a parse tree produced by CParser#struct_declaration.
def exitStruct_declaration(self, ctx:CParser.Struct_declarationContext):
pass
# Enter a parse tree produced by CParser#specifier_qualifier_list.
def enterSpecifier_qualifier_list(self, ctx:CParser.Specifier_qualifier_listContext):
pass
# Exit a parse tree produced by CParser#specifier_qualifier_list.
def exitSpecifier_qualifier_list(self, ctx:CParser.Specifier_qualifier_listContext):
pass
# Enter a parse tree produced by CParser#struct_declarator_list.
def enterStruct_declarator_list(self, ctx:CParser.Struct_declarator_listContext):
pass
# Exit a parse tree produced by CParser#struct_declarator_list.
def exitStruct_declarator_list(self, ctx:CParser.Struct_declarator_listContext):
pass
# Enter a parse tree produced by CParser#struct_declarator.
def enterStruct_declarator(self, ctx:CParser.Struct_declaratorContext):
pass
# Exit a parse tree produced by CParser#struct_declarator.
def exitStruct_declarator(self, ctx:CParser.Struct_declaratorContext):
pass
# Enter a parse tree produced by CParser#enum_specifier.
def enterEnum_specifier(self, ctx:CParser.Enum_specifierContext):
pass
# Exit a parse tree produced by CParser#enum_specifier.
def exitEnum_specifier(self, ctx:CParser.Enum_specifierContext):
pass
# Enter a parse tree produced by CParser#enumerator_list.
def enterEnumerator_list(self, ctx:CParser.Enumerator_listContext):
pass
# Exit a parse tree produced by CParser#enumerator_list.
def exitEnumerator_list(self, ctx:CParser.Enumerator_listContext):
pass
# Enter a parse tree produced by CParser#enumerator.
def enterEnumerator(self, ctx:CParser.EnumeratorContext):
pass
# Exit a parse tree produced by CParser#enumerator.
def exitEnumerator(self, ctx:CParser.EnumeratorContext):
pass
# Enter a parse tree produced by CParser#type_qualifier.
def enterType_qualifier(self, ctx:CParser.Type_qualifierContext):
pass
# Exit a parse tree produced by CParser#type_qualifier.
def exitType_qualifier(self, ctx:CParser.Type_qualifierContext):
pass
# Enter a parse tree produced by CParser#declarator.
def enterDeclarator(self, ctx:CParser.DeclaratorContext):
pass
# Exit a parse tree produced by CParser#declarator.
def exitDeclarator(self, ctx:CParser.DeclaratorContext):
pass
# Enter a parse tree produced by CParser#direct_declarator.
def enterDirect_declarator(self, ctx:CParser.Direct_declaratorContext):
pass
# Exit a parse tree produced by CParser#direct_declarator.
def exitDirect_declarator(self, ctx:CParser.Direct_declaratorContext):
pass
# Enter a parse tree produced by CParser#declarator_suffix.
def enterDeclarator_suffix(self, ctx:CParser.Declarator_suffixContext):
pass
# Exit a parse tree produced by CParser#declarator_suffix.
def exitDeclarator_suffix(self, ctx:CParser.Declarator_suffixContext):
pass
# Enter a parse tree produced by CParser#pointer.
def enterPointer(self, ctx:CParser.PointerContext):
pass
# Exit a parse tree produced by CParser#pointer.
def exitPointer(self, ctx:CParser.PointerContext):
pass
# Enter a parse tree produced by CParser#parameter_type_list.
def enterParameter_type_list(self, ctx:CParser.Parameter_type_listContext):
pass
# Exit a parse tree produced by CParser#parameter_type_list.
def exitParameter_type_list(self, ctx:CParser.Parameter_type_listContext):
pass
# Enter a parse tree produced by CParser#parameter_list.
def enterParameter_list(self, ctx:CParser.Parameter_listContext):
pass
# Exit a parse tree produced by CParser#parameter_list.
def exitParameter_list(self, ctx:CParser.Parameter_listContext):
pass
# Enter a parse tree produced by CParser#parameter_declaration.
def enterParameter_declaration(self, ctx:CParser.Parameter_declarationContext):
pass
# Exit a parse tree produced by CParser#parameter_declaration.
def exitParameter_declaration(self, ctx:CParser.Parameter_declarationContext):
pass
# Enter a parse tree produced by CParser#identifier_list.
def enterIdentifier_list(self, ctx:CParser.Identifier_listContext):
pass
# Exit a parse tree produced by CParser#identifier_list.
def exitIdentifier_list(self, ctx:CParser.Identifier_listContext):
pass
# Enter a parse tree produced by CParser#type_name.
def enterType_name(self, ctx:CParser.Type_nameContext):
pass
# Exit a parse tree produced by CParser#type_name.
def exitType_name(self, ctx:CParser.Type_nameContext):
pass
# Enter a parse tree produced by CParser#abstract_declarator.
def enterAbstract_declarator(self, ctx:CParser.Abstract_declaratorContext):
pass
# Exit a parse tree produced by CParser#abstract_declarator.
def exitAbstract_declarator(self, ctx:CParser.Abstract_declaratorContext):
pass
# Enter a parse tree produced by CParser#direct_abstract_declarator.
def enterDirect_abstract_declarator(self, ctx:CParser.Direct_abstract_declaratorContext):
pass
# Exit a parse tree produced by CParser#direct_abstract_declarator.
def exitDirect_abstract_declarator(self, ctx:CParser.Direct_abstract_declaratorContext):
pass
# Enter a parse tree produced by CParser#abstract_declarator_suffix.
def enterAbstract_declarator_suffix(self, ctx:CParser.Abstract_declarator_suffixContext):
pass
# Exit a parse tree produced by CParser#abstract_declarator_suffix.
def exitAbstract_declarator_suffix(self, ctx:CParser.Abstract_declarator_suffixContext):
pass
# Enter a parse tree produced by CParser#initializer.
def enterInitializer(self, ctx:CParser.InitializerContext):
pass
# Exit a parse tree produced by CParser#initializer.
def exitInitializer(self, ctx:CParser.InitializerContext):
pass
# Enter a parse tree produced by CParser#initializer_list.
def enterInitializer_list(self, ctx:CParser.Initializer_listContext):
pass
# Exit a parse tree produced by CParser#initializer_list.
def exitInitializer_list(self, ctx:CParser.Initializer_listContext):
pass
# Enter a parse tree produced by CParser#argument_expression_list.
def enterArgument_expression_list(self, ctx:CParser.Argument_expression_listContext):
pass
# Exit a parse tree produced by CParser#argument_expression_list.
def exitArgument_expression_list(self, ctx:CParser.Argument_expression_listContext):
pass
# Enter a parse tree produced by CParser#additive_expression.
def enterAdditive_expression(self, ctx:CParser.Additive_expressionContext):
pass
# Exit a parse tree produced by CParser#additive_expression.
def exitAdditive_expression(self, ctx:CParser.Additive_expressionContext):
pass
# Enter a parse tree produced by CParser#multiplicative_expression.
def enterMultiplicative_expression(self, ctx:CParser.Multiplicative_expressionContext):
pass
# Exit a parse tree produced by CParser#multiplicative_expression.
def exitMultiplicative_expression(self, ctx:CParser.Multiplicative_expressionContext):
pass
# Enter a parse tree produced by CParser#cast_expression.
def enterCast_expression(self, ctx:CParser.Cast_expressionContext):
pass
# Exit a parse tree produced by CParser#cast_expression.
def exitCast_expression(self, ctx:CParser.Cast_expressionContext):
pass
# Enter a parse tree produced by CParser#unary_expression.
def enterUnary_expression(self, ctx:CParser.Unary_expressionContext):
pass
# Exit a parse tree produced by CParser#unary_expression.
def exitUnary_expression(self, ctx:CParser.Unary_expressionContext):
pass
# Enter a parse tree produced by CParser#postfix_expression.
def enterPostfix_expression(self, ctx:CParser.Postfix_expressionContext):
pass
# Exit a parse tree produced by CParser#postfix_expression.
def exitPostfix_expression(self, ctx:CParser.Postfix_expressionContext):
pass
# Enter a parse tree produced by CParser#macro_parameter_list.
def enterMacro_parameter_list(self, ctx:CParser.Macro_parameter_listContext):
pass
# Exit a parse tree produced by CParser#macro_parameter_list.
def exitMacro_parameter_list(self, ctx:CParser.Macro_parameter_listContext):
pass
# Enter a parse tree produced by CParser#unary_operator.
def enterUnary_operator(self, ctx:CParser.Unary_operatorContext):
pass
# Exit a parse tree produced by CParser#unary_operator.
def exitUnary_operator(self, ctx:CParser.Unary_operatorContext):
pass
# Enter a parse tree produced by CParser#primary_expression.
def enterPrimary_expression(self, ctx:CParser.Primary_expressionContext):
pass
# Exit a parse tree produced by CParser#primary_expression.
def exitPrimary_expression(self, ctx:CParser.Primary_expressionContext):
pass
# Enter a parse tree produced by CParser#constant.
def enterConstant(self, ctx:CParser.ConstantContext):
pass
# Exit a parse tree produced by CParser#constant.
def exitConstant(self, ctx:CParser.ConstantContext):
pass
# Enter a parse tree produced by CParser#expression.
def enterExpression(self, ctx:CParser.ExpressionContext):
pass
# Exit a parse tree produced by CParser#expression.
def exitExpression(self, ctx:CParser.ExpressionContext):
pass
# Enter a parse tree produced by CParser#constant_expression.
def enterConstant_expression(self, ctx:CParser.Constant_expressionContext):
pass
# Exit a parse tree produced by CParser#constant_expression.
def exitConstant_expression(self, ctx:CParser.Constant_expressionContext):
pass
# Enter a parse tree produced by CParser#assignment_expression.
def enterAssignment_expression(self, ctx:CParser.Assignment_expressionContext):
pass
# Exit a parse tree produced by CParser#assignment_expression.
def exitAssignment_expression(self, ctx:CParser.Assignment_expressionContext):
pass
# Enter a parse tree produced by CParser#lvalue.
def enterLvalue(self, ctx:CParser.LvalueContext):
pass
# Exit a parse tree produced by CParser#lvalue.
def exitLvalue(self, ctx:CParser.LvalueContext):
pass
# Enter a parse tree produced by CParser#assignment_operator.
def enterAssignment_operator(self, ctx:CParser.Assignment_operatorContext):
pass
# Exit a parse tree produced by CParser#assignment_operator.
def exitAssignment_operator(self, ctx:CParser.Assignment_operatorContext):
pass
# Enter a parse tree produced by CParser#conditional_expression.
def enterConditional_expression(self, ctx:CParser.Conditional_expressionContext):
pass
# Exit a parse tree produced by CParser#conditional_expression.
def exitConditional_expression(self, ctx:CParser.Conditional_expressionContext):
pass
# Enter a parse tree produced by CParser#logical_or_expression.
def enterLogical_or_expression(self, ctx:CParser.Logical_or_expressionContext):
pass
# Exit a parse tree produced by CParser#logical_or_expression.
def exitLogical_or_expression(self, ctx:CParser.Logical_or_expressionContext):
pass
# Enter a parse tree produced by CParser#logical_and_expression.
def enterLogical_and_expression(self, ctx:CParser.Logical_and_expressionContext):
pass
# Exit a parse tree produced by CParser#logical_and_expression.
def exitLogical_and_expression(self, ctx:CParser.Logical_and_expressionContext):
pass
# Enter a parse tree produced by CParser#inclusive_or_expression.
def enterInclusive_or_expression(self, ctx:CParser.Inclusive_or_expressionContext):
pass
# Exit a parse tree produced by CParser#inclusive_or_expression.
def exitInclusive_or_expression(self, ctx:CParser.Inclusive_or_expressionContext):
pass
# Enter a parse tree produced by CParser#exclusive_or_expression.
def enterExclusive_or_expression(self, ctx:CParser.Exclusive_or_expressionContext):
pass
# Exit a parse tree produced by CParser#exclusive_or_expression.
def exitExclusive_or_expression(self, ctx:CParser.Exclusive_or_expressionContext):
pass
# Enter a parse tree produced by CParser#and_expression.
def enterAnd_expression(self, ctx:CParser.And_expressionContext):
pass
# Exit a parse tree produced by CParser#and_expression.
def exitAnd_expression(self, ctx:CParser.And_expressionContext):
pass
# Enter a parse tree produced by CParser#equality_expression.
def enterEquality_expression(self, ctx:CParser.Equality_expressionContext):
pass
# Exit a parse tree produced by CParser#equality_expression.
def exitEquality_expression(self, ctx:CParser.Equality_expressionContext):
pass
# Enter a parse tree produced by CParser#relational_expression.
def enterRelational_expression(self, ctx:CParser.Relational_expressionContext):
pass
# Exit a parse tree produced by CParser#relational_expression.
def exitRelational_expression(self, ctx:CParser.Relational_expressionContext):
pass
# Enter a parse tree produced by CParser#shift_expression.
def enterShift_expression(self, ctx:CParser.Shift_expressionContext):
pass
# Exit a parse tree produced by CParser#shift_expression.
def exitShift_expression(self, ctx:CParser.Shift_expressionContext):
pass
# Enter a parse tree produced by CParser#statement.
def enterStatement(self, ctx:CParser.StatementContext):
pass
# Exit a parse tree produced by CParser#statement.
def exitStatement(self, ctx:CParser.StatementContext):
pass
# Enter a parse tree produced by CParser#asm2_statement.
def enterAsm2_statement(self, ctx:CParser.Asm2_statementContext):
pass
# Exit a parse tree produced by CParser#asm2_statement.
def exitAsm2_statement(self, ctx:CParser.Asm2_statementContext):
pass
# Enter a parse tree produced by CParser#asm1_statement.
def enterAsm1_statement(self, ctx:CParser.Asm1_statementContext):
pass
# Exit a parse tree produced by CParser#asm1_statement.
def exitAsm1_statement(self, ctx:CParser.Asm1_statementContext):
pass
# Enter a parse tree produced by CParser#asm_statement.
def enterAsm_statement(self, ctx:CParser.Asm_statementContext):
pass
# Exit a parse tree produced by CParser#asm_statement.
def exitAsm_statement(self, ctx:CParser.Asm_statementContext):
pass
# Enter a parse tree produced by CParser#macro_statement.
def enterMacro_statement(self, ctx:CParser.Macro_statementContext):
pass
# Exit a parse tree produced by CParser#macro_statement.
def exitMacro_statement(self, ctx:CParser.Macro_statementContext):
pass
# Enter a parse tree produced by CParser#labeled_statement.
def enterLabeled_statement(self, ctx:CParser.Labeled_statementContext):
pass
# Exit a parse tree produced by CParser#labeled_statement.
def exitLabeled_statement(self, ctx:CParser.Labeled_statementContext):
pass
# Enter a parse tree produced by CParser#compound_statement.
def enterCompound_statement(self, ctx:CParser.Compound_statementContext):
pass
# Exit a parse tree produced by CParser#compound_statement.
def exitCompound_statement(self, ctx:CParser.Compound_statementContext):
pass
# Enter a parse tree produced by CParser#statement_list.
def enterStatement_list(self, ctx:CParser.Statement_listContext):
pass
# Exit a parse tree produced by CParser#statement_list.
def exitStatement_list(self, ctx:CParser.Statement_listContext):
pass
# Enter a parse tree produced by CParser#expression_statement.
def enterExpression_statement(self, ctx:CParser.Expression_statementContext):
pass
# Exit a parse tree produced by CParser#expression_statement.
def exitExpression_statement(self, ctx:CParser.Expression_statementContext):
pass
# Enter a parse tree produced by CParser#selection_statement.
def enterSelection_statement(self, ctx:CParser.Selection_statementContext):
pass
# Exit a parse tree produced by CParser#selection_statement.
def exitSelection_statement(self, ctx:CParser.Selection_statementContext):
pass
# Enter a parse tree produced by CParser#iteration_statement.
def enterIteration_statement(self, ctx:CParser.Iteration_statementContext):
pass
# Exit a parse tree produced by CParser#iteration_statement.
def exitIteration_statement(self, ctx:CParser.Iteration_statementContext):
pass
# Enter a parse tree produced by CParser#jump_statement.
def enterJump_statement(self, ctx:CParser.Jump_statementContext):
pass
# Exit a parse tree produced by CParser#jump_statement.
def exitJump_statement(self, ctx:CParser.Jump_statementContext):
pass

File diff suppressed because it is too large

View File

@ -21,7 +21,7 @@ import re
import Common.LongFilePathOs as os
import sys
import antlr4
import antlr3
from .CLexer import CLexer
from .CParser import CParser

View File

@ -17,20 +17,18 @@
from __future__ import absolute_import
import Common.LongFilePathOs as os, time, glob
import Common.EdkLogger as EdkLogger
from Eot import EotGlobalData
from . import EotGlobalData
from optparse import OptionParser
from Common.StringUtils import NormPath
from Common import BuildToolError
from Common.Misc import GuidStructureStringToGuidString, sdict
from Eot.Parser import *
from Eot.InfParserLite import EdkInfParser
from Common.StringUtils import GetSplitValueList
from Eot import c
from Eot import Database
from .InfParserLite import *
from . import c
from . import Database
from array import array
from Eot.Report import Report
from .Report import Report
from Common.BuildVersion import gBUILD_VERSION
from Eot.Parser import ConvertGuid
from .Parser import ConvertGuid
from Common.LongFilePathSupport import OpenLongFilePath as open
import struct
import uuid
@ -60,14 +58,14 @@ class Image(array):
self._SubImages = sdict() # {offset: Image()}
array.__init__(self)
array.__init__(self, 'B')
def __repr__(self):
return self._ID_
def __len__(self):
Len = array.__len__(self)
for Offset in self._SubImages.keys():
for Offset in self._SubImages:
Len += len(self._SubImages[Offset])
return Len
@ -156,11 +154,19 @@ class CompressedImage(Image):
def _GetSections(self):
try:
TmpData = DeCompress('Efi', self[self._HEADER_SIZE_:])
from . import EfiCompressor
TmpData = EfiCompressor.FrameworkDecompress(
self[self._HEADER_SIZE_:],
len(self) - self._HEADER_SIZE_
)
DecData = array('B')
DecData.fromstring(TmpData)
except:
TmpData = DeCompress('Framework', self[self._HEADER_SIZE_:])
from . import EfiCompressor
TmpData = EfiCompressor.UefiDecompress(
self[self._HEADER_SIZE_:],
len(self) - self._HEADER_SIZE_
)
DecData = array('B')
DecData.fromstring(TmpData)
@ -291,7 +297,7 @@ class Depex(Image):
Expression = property(_GetExpression)
# # FirmwareVolume() class
## FirmwareVolume() class
#
# A class for Firmware Volume
#
@ -302,12 +308,12 @@ class FirmwareVolume(Image):
_FfsGuid = "8C8CE578-8A3D-4F1C-9935-896185C32DD3"
_GUID_ = struct.Struct("16x 1I2H8B")
_LENGTH_ = struct.Struct("16x 16x 1Q")
_SIG_ = struct.Struct("16x 16x 8x 1I")
_ATTR_ = struct.Struct("16x 16x 8x 4x 1I")
_HLEN_ = struct.Struct("16x 16x 8x 4x 4x 1H")
_CHECKSUM_ = struct.Struct("16x 16x 8x 4x 4x 2x 1H")
_GUID_ = struct.Struct("16x 1I2H8B")
_LENGTH_ = struct.Struct("16x 16x 1Q")
_SIG_ = struct.Struct("16x 16x 8x 1I")
_ATTR_ = struct.Struct("16x 16x 8x 4x 1I")
_HLEN_ = struct.Struct("16x 16x 8x 4x 4x 1H")
_CHECKSUM_ = struct.Struct("16x 16x 8x 4x 4x 2x 1H")
def __init__(self, Name=''):
Image.__init__(self)
@ -381,7 +387,7 @@ class FirmwareVolume(Image):
DepexString = DepexList[0].strip()
return (CouldBeLoaded, DepexString, FileDepex)
def Dispatch(self, Db=None):
def Dispatch(self, Db = None):
if Db is None:
return False
self.UnDispatchedFfsDict = copy.copy(self.FfsDict)
@ -391,7 +397,7 @@ class FirmwareVolume(Image):
FfsDxeCoreGuid = None
FfsPeiPrioriGuid = None
FfsDxePrioriGuid = None
for FfsID in self.UnDispatchedFfsDict.keys():
for FfsID in self.UnDispatchedFfsDict:
Ffs = self.UnDispatchedFfsDict[FfsID]
if Ffs.Type == 0x03:
FfsSecCoreGuid = FfsID
@ -433,7 +439,6 @@ class FirmwareVolume(Image):
if GuidString in self.UnDispatchedFfsDict:
self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
self.LoadPpi(Db, GuidString)
self.DisPatchPei(Db)
# Parse DXE then
@ -455,7 +460,6 @@ class FirmwareVolume(Image):
if GuidString in self.UnDispatchedFfsDict:
self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
self.LoadProtocol(Db, GuidString)
self.DisPatchDxe(Db)
def LoadProtocol(self, Db, ModuleGuid):
@ -497,7 +501,7 @@ class FirmwareVolume(Image):
def DisPatchDxe(self, Db):
IsInstalled = False
ScheduleList = sdict()
for FfsID in self.UnDispatchedFfsDict.keys():
for FfsID in self.UnDispatchedFfsDict:
CouldBeLoaded = False
DepexString = ''
FileDepex = None
@ -544,7 +548,7 @@ class FirmwareVolume(Image):
else:
self.UnDispatchedFfsDict[FfsID].Depex = DepexString
for FfsID in ScheduleList.keys():
for FfsID in ScheduleList:
NewFfs = ScheduleList.pop(FfsID)
FfsName = 'UnKnown'
self.OrderedFfsDict[FfsID] = NewFfs
@ -556,13 +560,12 @@ class FirmwareVolume(Image):
RecordSet = Db.TblReport.Exec(SqlCommand)
if RecordSet != []:
FfsName = RecordSet[0][0]
if IsInstalled:
self.DisPatchDxe(Db)
def DisPatchPei(self, Db):
IsInstalled = False
for FfsID in self.UnDispatchedFfsDict.keys():
for FfsID in self.UnDispatchedFfsDict:
CouldBeLoaded = True
DepexString = ''
FileDepex = None
@ -573,6 +576,7 @@ class FirmwareVolume(Image):
if Section.Type == 0x1B:
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Ppi')
break
if Section.Type == 0x01:
CompressSections = Section._SubImages[4]
for CompressSection in CompressSections.Sections:
@ -599,12 +603,11 @@ class FirmwareVolume(Image):
if IsInstalled:
self.DisPatchPei(Db)
def __str__(self):
global gIndention
gIndention += 4
FvInfo = '\n' + ' ' * gIndention
FvInfo += "[FV:%s] file_system=%s size=%x checksum=%s\n" % (self.Name, self.FileSystemGuid, self.Size, self.Checksum)
FvInfo += "[FV:%s] file_system=%s size=%x checksum=%s\n" % (self.Name, self.FileSystemGuid, self.Size, self.Checksum)
FfsInfo = "\n".join([str(self.FfsDict[FfsId]) for FfsId in self.FfsDict])
gIndention -= 4
return FvInfo + FfsInfo
@ -612,7 +615,7 @@ class FirmwareVolume(Image):
def _Unpack(self):
Size = self._LENGTH_.unpack_from(self._BUF_, self._OFF_)[0]
self.empty()
self.extend(self._BUF_[self._OFF_:self._OFF_ + Size])
self.extend(self._BUF_[self._OFF_:self._OFF_+Size])
# traverse the FFS
EndOfFv = Size
@ -740,9 +743,10 @@ class GuidDefinedImage(Image):
SectionList.append(Sec)
elif Guid == self.TIANO_COMPRESS_GUID:
try:
from . import EfiCompressor
# skip the header
Offset = self.DataOffset - 4
TmpData = DeCompress('Framework', self[self.Offset:])
TmpData = EfiCompressor.FrameworkDecompress(self[Offset:], len(self)-Offset)
DecData = array('B')
DecData.fromstring(TmpData)
Offset = 0
@ -760,10 +764,10 @@ class GuidDefinedImage(Image):
pass
elif Guid == self.LZMA_COMPRESS_GUID:
try:
from . import LzmaCompressor
# skip the header
Offset = self.DataOffset - 4
TmpData = DeCompress('Lzma', self[self.Offset:])
TmpData = LzmaCompressor.LzmaDecompress(self[Offset:], len(self)-Offset)
DecData = array('B')
DecData.fromstring(TmpData)
Offset = 0
@ -844,7 +848,7 @@ class Section(Image):
SectionInfo += "[SECTION:%s] offset=%x size=%x" % (self._TypeName[self.Type], self._OFF_, self.Size)
else:
SectionInfo += "[SECTION:%x<unknown>] offset=%x size=%x " % (self.Type, self._OFF_, self.Size)
for Offset in self._SubImages.keys():
for Offset in self._SubImages:
SectionInfo += ", " + str(self._SubImages[Offset])
gIndention -= 4
return SectionInfo
@ -978,7 +982,7 @@ class Ffs(Image):
FfsInfo = Indention
FfsInfo += "[FFS:%s] offset=%x size=%x guid=%s free_space=%x alignment=%s\n" % \
(Ffs._TypeName[self.Type], self._OFF_, self.Size, self.Guid, self.FreeSpace, self.Alignment)
SectionInfo = '\n'.join([str(self.Sections[Offset]) for Offset in self.Sections.keys()])
SectionInfo = '\n'.join([str(self.Sections[Offset]) for Offset in self.Sections])
gIndention -= 4
return FfsInfo + SectionInfo + "\n"
@ -1083,6 +1087,379 @@ class Ffs(Image):
Alignment = property(_GetAlignment)
State = property(_GetState, _SetState)
## FirmwareVolume() class
#
# A class for Firmware Volume
#
class FirmwareVolume(Image):
# Read FvLength, Attributes, HeaderLength, Checksum
_HEADER_ = struct.Struct("16x 1I2H8B 1Q 4x 1I 1H 1H")
_HEADER_SIZE_ = _HEADER_.size
_FfsGuid = "8C8CE578-8A3D-4F1C-9935-896185C32DD3"
_GUID_ = struct.Struct("16x 1I2H8B")
_LENGTH_ = struct.Struct("16x 16x 1Q")
_SIG_ = struct.Struct("16x 16x 8x 1I")
_ATTR_ = struct.Struct("16x 16x 8x 4x 1I")
_HLEN_ = struct.Struct("16x 16x 8x 4x 4x 1H")
_CHECKSUM_ = struct.Struct("16x 16x 8x 4x 4x 2x 1H")
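# Orientation sketch (offsets inferred from the pad counts above, not stated in
# the source): the skipped bytes map onto EFI_FIRMWARE_VOLUME_HEADER as
#   0x00 ZeroVector[16], 0x10 FileSystemGuid, 0x20 FvLength (UINT64),
#   0x28 Signature ('_FVH'), 0x2C Attributes, 0x30 HeaderLength, 0x32 Checksum
# so e.g. "16x 16x 1Q" skips ZeroVector and FileSystemGuid to read FvLength.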
def __init__(self, Name=''):
Image.__init__(self)
self.Name = Name
self.FfsDict = sdict()
self.OrderedFfsDict = sdict()
self.UnDispatchedFfsDict = sdict()
self.ProtocolList = sdict()
def CheckArchProtocol(self):
for Item in EotGlobalData.gArchProtocolGuids:
if Item.lower() not in EotGlobalData.gProtocolList:
return False
return True
def ParseDepex(self, Depex, Type):
List = None
if Type == 'Ppi':
List = EotGlobalData.gPpiList
if Type == 'Protocol':
List = EotGlobalData.gProtocolList
DepexStack = []
DepexList = []
DepexString = ''
FileDepex = None
CouldBeLoaded = True
for Index in range(0, len(Depex.Expression)):
Item = Depex.Expression[Index]
if Item == 0x00:
Index = Index + 1
Guid = gGuidStringFormat % Depex.Expression[Index]
if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08:
return (True, 'BEFORE %s' % Guid, [Guid, 'BEFORE'])
elif Item == 0x01:
Index = Index + 1
Guid = gGuidStringFormat % Depex.Expression[Index]
if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08:
return (True, 'AFTER %s' % Guid, [Guid, 'AFTER'])
elif Item == 0x02:
Index = Index + 1
Guid = gGuidStringFormat % Depex.Expression[Index]
if Guid.lower() in List:
DepexStack.append(True)
DepexList.append(Guid)
else:
DepexStack.append(False)
DepexList.append(Guid)
continue
elif Item == 0x03 or Item == 0x04:
DepexStack.append(eval(str(DepexStack.pop()) + ' ' + Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop())))
DepexList.append(str(DepexList.pop()) + ' ' + Depex._OPCODE_STRING_[Item].upper() + ' ' + str(DepexList.pop()))
elif Item == 0x05:
DepexStack.append(eval(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop())))
DepexList.append(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexList.pop()))
elif Item == 0x06:
DepexStack.append(True)
DepexList.append('TRUE')
DepexString = DepexString + 'TRUE' + ' '
elif Item == 0x07:
DepexStack.append(False)
DepexList.append('False')
DepexString = DepexString + 'FALSE' + ' '
elif Item == 0x08:
if Index != len(Depex.Expression) - 1:
CouldBeLoaded = False
else:
CouldBeLoaded = DepexStack.pop()
else:
CouldBeLoaded = False
if DepexList != []:
DepexString = DepexList[0].strip()
return (CouldBeLoaded, DepexString, FileDepex)
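# Illustrative walk-through (example bytes are hypothetical): a depex body of
#   0x02 GuidA 0x02 GuidB 0x03 0x08   (PUSH GuidA, PUSH GuidB, AND, END)
# pushes True/False per GUID depending on membership in the Ppi/Protocol list;
# 0x03/0x04 pop two operands and push the AND/OR result, 0x05 negates the top,
# and 0x08 pops the final value into CouldBeLoaded. Opcodes 0x00/0x01 instead
# short-circuit with a file-level BEFORE/AFTER dependency.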
def Dispatch(self, Db = None):
if Db is None:
return False
self.UnDispatchedFfsDict = copy.copy(self.FfsDict)
# Find PeiCore, DxeCore, PeiPriori, DxePriori first
FfsSecCoreGuid = None
FfsPeiCoreGuid = None
FfsDxeCoreGuid = None
FfsPeiPrioriGuid = None
FfsDxePrioriGuid = None
for FfsID in self.UnDispatchedFfsDict:
Ffs = self.UnDispatchedFfsDict[FfsID]
if Ffs.Type == 0x03:
FfsSecCoreGuid = FfsID
continue
if Ffs.Type == 0x04:
FfsPeiCoreGuid = FfsID
continue
if Ffs.Type == 0x05:
FfsDxeCoreGuid = FfsID
continue
if Ffs.Guid.lower() == gPeiAprioriFileNameGuid:
FfsPeiPrioriGuid = FfsID
continue
if Ffs.Guid.lower() == gAprioriGuid:
FfsDxePrioriGuid = FfsID
continue
# Parse SEC_CORE first
if FfsSecCoreGuid is not None:
self.OrderedFfsDict[FfsSecCoreGuid] = self.UnDispatchedFfsDict.pop(FfsSecCoreGuid)
self.LoadPpi(Db, FfsSecCoreGuid)
# Parse PEI first
if FfsPeiCoreGuid is not None:
self.OrderedFfsDict[FfsPeiCoreGuid] = self.UnDispatchedFfsDict.pop(FfsPeiCoreGuid)
self.LoadPpi(Db, FfsPeiCoreGuid)
if FfsPeiPrioriGuid is not None:
# Load PEIM described in priori file
FfsPeiPriori = self.UnDispatchedFfsDict.pop(FfsPeiPrioriGuid)
if len(FfsPeiPriori.Sections) == 1:
Section = FfsPeiPriori.Sections.popitem()[1]
if Section.Type == 0x19:
GuidStruct = struct.Struct('1I2H8B')
Start = 4
while len(Section) > Start:
Guid = GuidStruct.unpack_from(Section[Start : Start + 16])
GuidString = gGuidStringFormat % Guid
Start = Start + 16
if GuidString in self.UnDispatchedFfsDict:
self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
self.LoadPpi(Db, GuidString)
self.DisPatchPei(Db)
# Parse DXE then
if FfsDxeCoreGuid is not None:
self.OrderedFfsDict[FfsDxeCoreGuid] = self.UnDispatchedFfsDict.pop(FfsDxeCoreGuid)
self.LoadProtocol(Db, FfsDxeCoreGuid)
if FfsDxePrioriGuid is not None:
# Load DXE drivers described in priori file
FfsDxePriori = self.UnDispatchedFfsDict.pop(FfsDxePrioriGuid)
if len(FfsDxePriori.Sections) == 1:
Section = FfsDxePriori.Sections.popitem()[1]
if Section.Type == 0x19:
GuidStruct = struct.Struct('1I2H8B')
Start = 4
while len(Section) > Start:
Guid = GuidStruct.unpack_from(Section[Start : Start + 16])
GuidString = gGuidStringFormat % Guid
Start = Start + 16
if GuidString in self.UnDispatchedFfsDict:
self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
self.LoadProtocol(Db, GuidString)
self.DisPatchDxe(Db)
def LoadProtocol(self, Db, ModuleGuid):
SqlCommand = """select GuidValue from Report
where SourceFileFullPath in
(select Value1 from Inf where BelongsToFile =
(select BelongsToFile from Inf
where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
and Model = %s)
and ItemType = 'Protocol' and ItemMode = 'Produced'""" \
% (ModuleGuid, 5001, 3007)
RecordSet = Db.TblReport.Exec(SqlCommand)
for Record in RecordSet:
SqlCommand = """select Value2 from Inf where BelongsToFile =
(select DISTINCT BelongsToFile from Inf
where Value1 =
(select SourceFileFullPath from Report
where GuidValue like '%s' and ItemMode = 'Callback'))
and Value1 = 'FILE_GUID'""" % Record[0]
CallBackSet = Db.TblReport.Exec(SqlCommand)
if CallBackSet != []:
EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid
else:
EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid
def LoadPpi(self, Db, ModuleGuid):
SqlCommand = """select GuidValue from Report
where SourceFileFullPath in
(select Value1 from Inf where BelongsToFile =
(select BelongsToFile from Inf
where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
and Model = %s)
and ItemType = 'Ppi' and ItemMode = 'Produced'""" \
% (ModuleGuid, 5001, 3007)
RecordSet = Db.TblReport.Exec(SqlCommand)
for Record in RecordSet:
EotGlobalData.gPpiList[Record[0].lower()] = ModuleGuid
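# Reading the nested query above (a sketch of the schema use, only as far as
# the code itself shows it): the inner SELECTs resolve ModuleGuid to its INF
# record (Model 5001, Value1 = 'FILE_GUID'), then to that INF's source files
# (Model 3007), and the outer SELECT returns every GUID those sources report
# as a produced Ppi.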
def DisPatchDxe(self, Db):
IsInstalled = False
ScheduleList = sdict()
for FfsID in self.UnDispatchedFfsDict:
CouldBeLoaded = False
DepexString = ''
FileDepex = None
Ffs = self.UnDispatchedFfsDict[FfsID]
if Ffs.Type == 0x07:
# Get Depex
IsFoundDepex = False
for Section in Ffs.Sections.values():
# Find Depex
if Section.Type == 0x13:
IsFoundDepex = True
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Protocol')
break
if Section.Type == 0x01:
CompressSections = Section._SubImages[4]
for CompressSection in CompressSections.Sections:
if CompressSection.Type == 0x13:
IsFoundDepex = True
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Protocol')
break
if CompressSection.Type == 0x02:
NewSections = CompressSection._SubImages[4]
for NewSection in NewSections.Sections:
if NewSection.Type == 0x13:
IsFoundDepex = True
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Protocol')
break
# Not find Depex
if not IsFoundDepex:
CouldBeLoaded = self.CheckArchProtocol()
DepexString = ''
FileDepex = None
# Append New Ffs
if CouldBeLoaded:
IsInstalled = True
NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
NewFfs.Depex = DepexString
if FileDepex is not None:
ScheduleList.insert(FileDepex[1], FfsID, NewFfs, FileDepex[0])
else:
ScheduleList[FfsID] = NewFfs
else:
self.UnDispatchedFfsDict[FfsID].Depex = DepexString
for FfsID in ScheduleList:
NewFfs = ScheduleList.pop(FfsID)
FfsName = 'UnKnown'
self.OrderedFfsDict[FfsID] = NewFfs
self.LoadProtocol(Db, FfsID)
SqlCommand = """select Value2 from Inf
where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and lower(Value2) = lower('%s') and Model = %s)
and Model = %s and Value1='BASE_NAME'""" % (FfsID, 5001, 5001)
RecordSet = Db.TblReport.Exec(SqlCommand)
if RecordSet != []:
FfsName = RecordSet[0][0]
if IsInstalled:
self.DisPatchDxe(Db)
def DisPatchPei(self, Db):
IsInstalled = False
for FfsID in self.UnDispatchedFfsDict:
CouldBeLoaded = True
DepexString = ''
FileDepex = None
Ffs = self.UnDispatchedFfsDict[FfsID]
if Ffs.Type == 0x06 or Ffs.Type == 0x08:
# Get Depex
for Section in Ffs.Sections.values():
if Section.Type == 0x1B:
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Ppi')
break
if Section.Type == 0x01:
CompressSections = Section._SubImages[4]
for CompressSection in CompressSections.Sections:
if CompressSection.Type == 0x1B:
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Ppi')
break
if CompressSection.Type == 0x02:
NewSections = CompressSection._SubImages[4]
for NewSection in NewSections.Sections:
if NewSection.Type == 0x1B:
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Ppi')
break
# Append New Ffs
if CouldBeLoaded:
IsInstalled = True
NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
NewFfs.Depex = DepexString
self.OrderedFfsDict[FfsID] = NewFfs
self.LoadPpi(Db, FfsID)
else:
self.UnDispatchedFfsDict[FfsID].Depex = DepexString
if IsInstalled:
self.DisPatchPei(Db)
def __str__(self):
global gIndention
gIndention += 4
FvInfo = '\n' + ' ' * gIndention
FvInfo += "[FV:%s] file_system=%s size=%x checksum=%s\n" % (self.Name, self.FileSystemGuid, self.Size, self.Checksum)
FfsInfo = "\n".join([str(self.FfsDict[FfsId]) for FfsId in self.FfsDict])
gIndention -= 4
return FvInfo + FfsInfo
def _Unpack(self):
Size = self._LENGTH_.unpack_from(self._BUF_, self._OFF_)[0]
self.empty()
self.extend(self._BUF_[self._OFF_:self._OFF_+Size])
# traverse the FFS
EndOfFv = Size
FfsStartAddress = self.HeaderSize
LastFfsObj = None
while FfsStartAddress < EndOfFv:
FfsObj = Ffs()
FfsObj.frombuffer(self, FfsStartAddress)
FfsId = repr(FfsObj)
if ((self.Attributes & 0x00000800) != 0 and len(FfsObj) == 0xFFFFFF) \
or ((self.Attributes & 0x00000800) == 0 and len(FfsObj) == 0):
if LastFfsObj is not None:
LastFfsObj.FreeSpace = EndOfFv - LastFfsObj._OFF_ - len(LastFfsObj)
else:
if FfsId in self.FfsDict:
EdkLogger.error("FV", 0, "Duplicate GUID in FFS",
ExtraData="\t%s @ %s\n\t%s @ %s" \
% (FfsObj.Guid, FfsObj.Offset,
self.FfsDict[FfsId].Guid, self.FfsDict[FfsId].Offset))
self.FfsDict[FfsId] = FfsObj
if LastFfsObj is not None:
LastFfsObj.FreeSpace = FfsStartAddress - LastFfsObj._OFF_ - len(LastFfsObj)
FfsStartAddress += len(FfsObj)
#
# align to next 8-byte aligned address: A = (A + 8 - 1) & (~(8 - 1))
# The next FFS must start at the next 8-byte aligned address
#
FfsStartAddress = (FfsStartAddress + 7) & (~7)
LastFfsObj = FfsObj
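#
# Worked example (hypothetical offsets): an FFS ending at 0x1001 gives
# (0x1001 + 7) & ~7 = 0x1008, while an already aligned 0x1008 is unchanged,
# so the scan always resumes on an 8-byte boundary and never mid-padding.
#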
def _GetAttributes(self):
return self.GetField(self._ATTR_, 0)[0]
def _GetSize(self):
return self.GetField(self._LENGTH_, 0)[0]
def _GetChecksum(self):
return self.GetField(self._CHECKSUM_, 0)[0]
def _GetHeaderLength(self):
return self.GetField(self._HLEN_, 0)[0]
def _GetFileSystemGuid(self):
return gGuidStringFormat % self.GetField(self._GUID_, 0)
Attributes = property(_GetAttributes)
Size = property(_GetSize)
Checksum = property(_GetChecksum)
HeaderSize = property(_GetHeaderLength)
FileSystemGuid = property(_GetFileSystemGuid)
## MultipleFv() class
#
@ -1093,10 +1470,8 @@ class MultipleFv(FirmwareVolume):
FirmwareVolume.__init__(self)
self.BasicInfo = []
for FvPath in FvList:
Fd = None
FvName = os.path.splitext(os.path.split(FvPath)[1])[0]
if FvPath.strip():
Fd = open(FvPath, 'rb')
Fd = open(FvPath, 'rb')
Buf = array('B')
try:
Buf.fromfile(Fd, os.path.getsize(FvPath))
@ -1257,9 +1632,8 @@ class Eot(object):
Path = os.path.join(EotGlobalData.gWORKSPACE, GuidList)
if os.path.isfile(Path):
for Line in open(Path):
if Line.strip():
(GuidName, GuidValue) = Line.split()
EotGlobalData.gGuidDict[GuidName] = GuidValue
(GuidName, GuidValue) = Line.split()
EotGlobalData.gGuidDict[GuidName] = GuidValue
## ConvertLogFile() method
#
@ -1320,7 +1694,7 @@ class Eot(object):
mCurrentSourceFileList = []
if SourceFileList:
sfl = open(SourceFileList, 'r')
sfl = open(SourceFileList, 'rb')
for line in sfl:
line = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip()))
if line[-2:].upper() == '.C' or line[-2:].upper() == '.H':
@ -1596,8 +1970,6 @@ class Eot(object):
def BuildMetaDataFileDatabase(self, Inf_Files):
EdkLogger.quiet("Building database for meta data files ...")
for InfFile in Inf_Files:
if not InfFile:
continue
EdkLogger.quiet("Parsing %s ..." % str(InfFile))
EdkInfParser(InfFile, EotGlobalData.gDb, Inf_Files[InfFile], '')
@ -1711,10 +2083,7 @@ if __name__ == '__main__':
EdkLogger.quiet(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")
StartTime = time.clock()
Eot = Eot(CommandLineOption=False,
SourceFileList=r'C:\TestEot\Source.txt',
GuidList=r'C:\TestEot\Guid.txt',
FvFileList=r'C:\TestEot\FVRECOVERY.Fv')
Eot = Eot()
FinishTime = time.clock()
BuildDuration = time.strftime("%M:%S", time.gmtime(int(round(FinishTime - StartTime))))

View File

@ -22,8 +22,8 @@ from Common.DataType import *
from CommonDataClass.DataClass import *
from Common.Identification import *
from Common.StringUtils import *
from Eot.Parser import *
from Eot import Database
from .Parser import *
from . import Database
## EdkInfParser() class
#
@ -153,3 +153,21 @@ class EdkInfParser(object):
self.ParserSource(CurrentSection, SectionItemList, ArchList, ThirdList)
#End of For
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
EdkLogger.Initialize()
EdkLogger.SetLevel(EdkLogger.QUIET)
Db = Database.Database('Inf.db')
Db.InitDatabase()
P = EdkInfParser(os.path.normpath("C:\Framework\Edk\Sample\Platform\Nt32\Dxe\PlatformBds\PlatformBds.inf"), Db, '', '')
for Inf in P.Sources:
print(Inf)
for Item in P.Macros:
print(Item, P.Macros[Item])
Db.Close()

View File

@ -2,7 +2,7 @@
# This file is used to define common parsing related functions used in parsing
# Inf/Dsc/Makefile process
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@ -25,32 +25,6 @@ from . import EotGlobalData
from Common.StringUtils import GetSplitList
from Common.LongFilePathSupport import OpenLongFilePath as open
import subprocess
## DeCompress
#
# Call external decompress tool to decompress the fv section
#
def DeCompress(Method, Input):
# Write the input to a temp file
open('_Temp.bin', 'wb').write(Input)
cmd = ''
if Method == 'Lzma':
cmd = r'LzmaCompress -o _New.bin -d _Temp.bin'
if Method == 'Efi':
cmd = r'TianoCompress -d --uefi -o _New.bin _Temp.bin'
if Method == 'Framework':
cmd = r'TianoCompress -d -o _New.bin _Temp.bin'
# Call tool to create the decompressed output file
Process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
Process.communicate()[0]
# Return the buffer of _New.bin
if os.path.exists('_New.bin'):
return open('_New.bin', 'rb').read()
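# A minimal usage sketch (assuming TianoCompress/LzmaCompress are on PATH;
# SectionData is a hypothetical caller-supplied buffer):
#   RawData = DeCompress('Lzma', SectionData)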
## PreProcess() method
#
# Pre process a file

View File

@ -77,7 +77,7 @@ class Report(object):
def GenerateUnDispatchedList(self):
FvObj = self.FvObj
EotGlobalData.gOP_UN_DISPATCHED.write('%s\n' % FvObj.Name)
for Item in FvObj.UnDispatchedFfsDict.keys():
for Item in FvObj.UnDispatchedFfsDict:
EotGlobalData.gOP_UN_DISPATCHED.write('%s\n' % FvObj.UnDispatchedFfsDict[Item])
## GenerateFv() method
@ -112,7 +112,7 @@ class Report(object):
self.WriteLn(Content)
EotGlobalData.gOP_DISPATCH_ORDER.write('Dispatched:\n')
for FfsId in FvObj.OrderedFfsDict.keys():
for FfsId in FvObj.OrderedFfsDict:
self.GenerateFfs(FvObj.OrderedFfsDict[FfsId])
Content = """ </table></td>
</tr>"""
@ -125,7 +125,7 @@ class Report(object):
self.WriteLn(Content)
EotGlobalData.gOP_DISPATCH_ORDER.write('\nUnDispatched:\n')
for FfsId in FvObj.UnDispatchedFfsDict.keys():
for FfsId in FvObj.UnDispatchedFfsDict:
self.GenerateFfs(FvObj.UnDispatchedFfsDict[FfsId])
Content = """ </table></td>
</tr>"""

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from struct import *
import Common.LongFilePathOs as os
from io import BytesIO
@ -51,7 +52,7 @@ class AprioriSection (AprioriSectionClassObject):
def GenFfs (self, FvName, Dict = {}, IsMakefile = False):
DXE_GUID = "FC510EE7-FFDC-11D4-BD41-0080C73C8881"
PEI_GUID = "1B45CC0A-156A-428A-AF62-49864DA0E6E6"
Buffer = BytesIO()
Buffer = BytesIO('')
AprioriFileGuid = DXE_GUID
if self.AprioriType == "PEI":
AprioriFileGuid = PEI_GUID
@ -96,7 +97,7 @@ class AprioriSection (AprioriSectionClassObject):
GuidPart = Guid.split('-')
Buffer.write(pack('I', int(GuidPart[0], 16)))
Buffer.write(pack('I', long(GuidPart[0], 16)))
Buffer.write(pack('H', int(GuidPart[1], 16)))
Buffer.write(pack('H', int(GuidPart[2], 16)))

View File

@ -15,13 +15,13 @@
##
# Import Modules
#
from __future__ import absolute_import
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from .GenFdsGlobalVariable import FindExtendTool
from CommonDataClass.FdfClass import CapsuleClassObject
import Common.LongFilePathOs as os
import subprocess
from io import BytesIO
from io import StringIO
from Common.Misc import SaveFileOnChange
from Common.Misc import PackRegistryFormatGuid
import uuid
@ -185,7 +185,7 @@ class Capsule (CapsuleClassObject) :
#
# The real capsule header structure is 28 bytes
#
Header.write(b'\x00'*(HdrSize-28))
Header.write('\x00'*(HdrSize-28))
Header.write(FwMgrHdr.getvalue())
Header.write(Content.getvalue())
#
@ -247,7 +247,7 @@ class Capsule (CapsuleClassObject) :
def GenCapInf(self):
self.CapInfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
self.UiCapsuleName + "_Cap" + '.inf')
CapInfFile = StringIO() #open (self.CapInfFileName , 'w+')
CapInfFile = BytesIO() #open (self.CapInfFileName , 'w+')
CapInfFile.writelines("[options]" + T_CHAR_LF)

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from . import Ffs
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from io import BytesIO
@ -82,7 +83,7 @@ class CapsuleFv (CapsuleData):
if self.FvName.find('.fv') == -1:
if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[self.FvName.upper()]
FdBuffer = BytesIO()
FdBuffer = BytesIO('')
FvObj.CapsuleName = self.CapsuleName
FvFile = FvObj.AddToBuffer(FdBuffer)
FvObj.CapsuleName = None
@ -229,7 +230,7 @@ class CapsulePayload(CapsuleData):
)
if AuthData:
Buffer += pack('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3])
Buffer += uuid.UUID(AuthData[4]).bytes_le
Buffer += uuid.UUID(AuthData[4]).get_bytes_le()
#
# Append file content to the structure

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from .Ffs import Ffs
from . import Section
import subprocess

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from . import Section
from .GenFdsGlobalVariable import GenFdsGlobalVariable
import subprocess
@ -87,9 +88,9 @@ class DataSection (DataSectionClassObject):
if ImageObj.SectionAlignment < 0x400:
self.Alignment = str (ImageObj.SectionAlignment)
elif ImageObj.SectionAlignment < 0x100000:
self.Alignment = str (ImageObj.SectionAlignment // 0x400) + 'K'
self.Alignment = str (ImageObj.SectionAlignment / 0x400) + 'K'
else:
self.Alignment = str (ImageObj.SectionAlignment // 0x100000) + 'M'
self.Alignment = str (ImageObj.SectionAlignment / 0x100000) + 'M'
NoStrip = True
if self.SecType in (BINARY_FILE_TYPE_TE, BINARY_FILE_TYPE_PE32):

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from . import Section
from .GenFdsGlobalVariable import GenFdsGlobalVariable
import subprocess

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from struct import *
from . import Section
from .GenFdsGlobalVariable import GenFdsGlobalVariable
@ -247,9 +248,9 @@ class EfiSection (EfiSectionClassObject):
if ImageObj.SectionAlignment < 0x400:
Align = str (ImageObj.SectionAlignment)
elif ImageObj.SectionAlignment < 0x100000:
Align = str (ImageObj.SectionAlignment // 0x400) + 'K'
Align = str (ImageObj.SectionAlignment / 0x400) + 'K'
else:
Align = str (ImageObj.SectionAlignment // 0x100000) + 'M'
Align = str (ImageObj.SectionAlignment / 0x100000) + 'M'
if File[(len(File)-4):] == '.efi':
MapFile = File.replace('.efi', '.map')

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from . import Region
from . import Fv
import Common.LongFilePathOs as os
@ -74,7 +75,7 @@ class FD(FDClassObject):
HasCapsuleRegion = True
break
if HasCapsuleRegion:
TempFdBuffer = BytesIO()
TempFdBuffer = BytesIO('')
PreviousRegionStart = -1
PreviousRegionSize = 1
@ -103,7 +104,7 @@ class FD(FDClassObject):
GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.vtfRawDict, self.DefineVarDict)
FdBuffer = BytesIO()
FdBuffer = BytesIO('')
PreviousRegionStart = -1
PreviousRegionSize = 1
for RegionObj in self.RegionList :

View File

@ -16,6 +16,8 @@
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import re
from . import Fd
@ -155,7 +157,7 @@ class IncludeFileProfile :
self.FileName = FileName
self.FileLinesList = []
try:
fsock = open(FileName, "r")
fsock = open(FileName, "rb", 0)
try:
self.FileLinesList = fsock.readlines()
for index, line in enumerate(self.FileLinesList):
@ -216,7 +218,7 @@ class FileProfile :
def __init__(self, FileName):
self.FileLinesList = []
try:
fsock = open(FileName, "r")
fsock = open(FileName, "rb", 0)
try:
self.FileLinesList = fsock.readlines()
finally:
@ -1615,7 +1617,7 @@ class FdfParser:
self.SetPcdLocalation(pcdPair)
FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
self.Profile.PcdFileLineDict[pcdPair] = FileLineTuple
Obj.Size = int(Size, 0)
Obj.Size = long(Size, 0)
return True
if self.__IsKeyword( "ErasePolarity"):
@ -1651,7 +1653,7 @@ class FdfParser:
if not self.__GetNextDecimalNumber() and not self.__GetNextHexNumber():
raise Warning("expected address", self.FileName, self.CurrentLineNumber)
BsAddress = int(self.__Token, 0)
BsAddress = long(self.__Token, 0)
Obj.BsBaseAddress = BsAddress
if self.__IsKeyword("RtBaseAddress"):
@ -1661,7 +1663,7 @@ class FdfParser:
if not self.__GetNextDecimalNumber() and not self.__GetNextHexNumber():
raise Warning("expected address", self.FileName, self.CurrentLineNumber)
RtAddress = int(self.__Token, 0)
RtAddress = long(self.__Token, 0)
Obj.RtBaseAddress = RtAddress
## __GetBlockStatements() method
@ -1709,7 +1711,7 @@ class FdfParser:
self.SetPcdLocalation(PcdPair)
FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
self.Profile.PcdFileLineDict[PcdPair] = FileLineTuple
BlockSize = int(BlockSize, 0)
BlockSize = long(BlockSize, 0)
BlockNumber = None
if self.__IsKeyword( "NumBlocks"):
@ -1719,7 +1721,7 @@ class FdfParser:
if not self.__GetNextDecimalNumber() and not self.__GetNextHexNumber():
raise Warning("expected block numbers", self.FileName, self.CurrentLineNumber)
BlockNumber = int(self.__Token, 0)
BlockNumber = long(self.__Token, 0)
Obj.BlockSizeList.append((BlockSize, BlockNumber, BlockSizePcd))
return True
@ -1828,7 +1830,7 @@ class FdfParser:
Expr += CurCh
self.__GetOneChar()
try:
return int(
return long(
ValueExpression(Expr,
self.__CollectMacroPcd()
)(True), 0)
@ -1876,7 +1878,7 @@ class FdfParser:
RegionOffsetPcdPattern.match(self.__CurrentLine()[self.CurrentOffsetWithinLine:]))
if IsRegionPcd:
RegionObj.PcdOffset = self.__GetNextPcdSettings()
self.Profile.PcdDict[RegionObj.PcdOffset] = "0x%08X" % (RegionObj.Offset + int(Fd.BaseAddress, 0))
self.Profile.PcdDict[RegionObj.PcdOffset] = "0x%08X" % (RegionObj.Offset + long(Fd.BaseAddress, 0))
self.SetPcdLocalation(RegionObj.PcdOffset)
self.__PcdDict['%s.%s' % (RegionObj.PcdOffset[1], RegionObj.PcdOffset[0])] = "0x%x" % RegionObj.Offset
FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
@ -3231,9 +3233,9 @@ class FdfParser:
if FdfParser.__Verify(Name, Value, 'UINT64'):
FmpData.MonotonicCount = Value
if FmpData.MonotonicCount.upper().startswith('0X'):
FmpData.MonotonicCount = (int)(FmpData.MonotonicCount, 16)
FmpData.MonotonicCount = (long)(FmpData.MonotonicCount, 16)
else:
FmpData.MonotonicCount = (int)(FmpData.MonotonicCount)
FmpData.MonotonicCount = (long)(FmpData.MonotonicCount)
if not self.__GetNextToken():
break
else:

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from . import Ffs
from . import Rule
import Common.LongFilePathOs as os
@ -82,7 +83,7 @@ class FileStatement (FileStatementClassObject) :
Dict.update(self.DefineVarDict)
SectionAlignments = None
if self.FvName is not None :
Buffer = BytesIO()
Buffer = BytesIO('')
if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName))
Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper())
@ -99,7 +100,7 @@ class FileStatement (FileStatementClassObject) :
elif self.FileName is not None:
if hasattr(self, 'FvFileType') and self.FvFileType == 'RAW':
if isinstance(self.FileName, list) and isinstance(self.SubAlignment, list) and len(self.FileName) == len(self.SubAlignment):
FileContent = BytesIO()
FileContent = ''
MaxAlignIndex = 0
MaxAlignValue = 1
for Index, File in enumerate(self.FileName):
@ -115,15 +116,15 @@ class FileStatement (FileStatementClassObject) :
if AlignValue > MaxAlignValue:
MaxAlignIndex = Index
MaxAlignValue = AlignValue
FileContent.write(Content)
if len(FileContent.getvalue()) % AlignValue != 0:
FileContent += Content
if len(FileContent) % AlignValue != 0:
Size = AlignValue - len(FileContent) % AlignValue
for i in range(0, Size):
FileContent.write(pack('B', 0xFF))
FileContent += pack('B', 0xFF)
if FileContent.getvalue() != b'':
if FileContent:
OutputRAWFile = os.path.join(GenFdsGlobalVariable.FfsDir, self.NameGuid, self.NameGuid + '.raw')
SaveFileOnChange(OutputRAWFile, FileContent.getvalue(), True)
SaveFileOnChange(OutputRAWFile, FileContent, True)
self.FileName = OutputRAWFile
self.SubAlignment = self.SubAlignment[MaxAlignIndex]

View File

@ -16,6 +16,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from . import Rule
import Common.LongFilePathOs as os
from io import BytesIO
@ -770,9 +771,9 @@ class FfsInfStatement(FfsInfStatementClassObject):
if ImageObj.SectionAlignment < 0x400:
self.Alignment = str (ImageObj.SectionAlignment)
elif ImageObj.SectionAlignment < 0x100000:
self.Alignment = str (ImageObj.SectionAlignment // 0x400) + 'K'
self.Alignment = str (ImageObj.SectionAlignment / 0x400) + 'K'
else:
self.Alignment = str (ImageObj.SectionAlignment // 0x100000) + 'M'
self.Alignment = str (ImageObj.SectionAlignment / 0x100000) + 'M'
if not NoStrip:
FileBeforeStrip = os.path.join(self.OutputPath, ModuleName + '.reloc')
@ -812,9 +813,9 @@ class FfsInfStatement(FfsInfStatementClassObject):
if ImageObj.SectionAlignment < 0x400:
self.Alignment = str (ImageObj.SectionAlignment)
elif ImageObj.SectionAlignment < 0x100000:
self.Alignment = str (ImageObj.SectionAlignment // 0x400) + 'K'
self.Alignment = str (ImageObj.SectionAlignment / 0x400) + 'K'
else:
self.Alignment = str (ImageObj.SectionAlignment // 0x100000) + 'M'
self.Alignment = str (ImageObj.SectionAlignment / 0x100000) + 'M'
if not NoStrip:
FileBeforeStrip = os.path.join(self.OutputPath, ModuleName + '.reloc')
@ -1073,7 +1074,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
def __GetBuildOutputMapFileVfrUniInfo(self, VfrUniBaseName):
MapFileName = os.path.join(self.EfiOutputPath, self.BaseName + ".map")
EfiFileName = os.path.join(self.EfiOutputPath, self.BaseName + ".efi")
return GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
return GetVariableOffset(MapFileName, EfiFileName, VfrUniBaseName.values())
## __GenUniVfrOffsetFile() method
#
@ -1086,7 +1087,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
def __GenUniVfrOffsetFile(VfrUniOffsetList, UniVfrOffsetFileName):
# Use a instance of StringIO to cache data
fStringIO = BytesIO()
fStringIO = BytesIO('')
for Item in VfrUniOffsetList:
if (Item[0].find("Strings") != -1):
@ -1096,7 +1097,8 @@ class FfsInfStatement(FfsInfStatementClassObject):
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
#
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
fStringIO.write(bytes(UniGuid))
UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
fStringIO.write(''.join(UniGuid))
UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue)
else:
@ -1106,7 +1108,8 @@ class FfsInfStatement(FfsInfStatementClassObject):
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
#
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
fStringIO.write(bytes(VfrGuid))
VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
fStringIO.write(''.join(VfrGuid))
type (Item[1])
VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue)

View File

@ -1,3 +1,4 @@
from __future__ import absolute_import
## @file
# process FV generation
#
@ -18,7 +19,6 @@
import Common.LongFilePathOs as os
import subprocess
from io import BytesIO
from io import StringIO
from struct import *
from . import Ffs
@ -205,16 +205,16 @@ class FV (FvClassObject):
# PI FvHeader is 0x48 byte
FvHeaderBuffer = FvFileObj.read(0x48)
# FV alignment position.
FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F)
FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E]) & 0x1F)
if FvAlignmentValue >= 0x400:
if FvAlignmentValue >= 0x100000:
if FvAlignmentValue >= 0x1000000:
#The max alignment supported by FFS is 16M.
self.FvAlignment = "16M"
else:
self.FvAlignment = str(FvAlignmentValue // 0x100000) + "M"
self.FvAlignment = str(FvAlignmentValue / 0x100000) + "M"
else:
self.FvAlignment = str(FvAlignmentValue // 0x400) + "K"
self.FvAlignment = str(FvAlignmentValue / 0x400) + "K"
else:
# FvAlignmentValue is less than 1K
self.FvAlignment = str (FvAlignmentValue)
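# Example reading (values are illustrative): byte 0x2E of the FV header holds
# the alignment exponent in its low 5 bits, so 0x10 yields 1 << 16 = 0x10000,
# rendered here as "64K"; exponents of 20 or more fall into the "M" branch.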
@ -265,7 +265,7 @@ class FV (FvClassObject):
#
self.InfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
self.UiFvName + '.inf')
self.FvInfFile = StringIO()
self.FvInfFile = BytesIO()
#
# Add [Options]
@ -340,7 +340,7 @@ class FV (FvClassObject):
GenFdsGlobalVariable.ErrorLogger("FV Extension Header Entries declared for %s with no FvNameGuid declaration." % (self.UiFvName))
else:
TotalSize = 16 + 4
Buffer = bytearray()
Buffer = ''
if self.UsedSizeEnable:
TotalSize += (4 + 4)
## define EFI_FV_EXT_TYPE_USED_SIZE_TYPE 0x03
@ -367,7 +367,7 @@ class FV (FvClassObject):
#
Buffer += (pack('HH', (FvUiLen + 16 + 4), 0x0002)
+ PackGUID(Guid)
+ bytes(self.UiFvName, 'utf-8'))
+ self.UiFvName)
for Index in range (0, len(self.FvExtEntryType)):
if self.FvExtEntryType[Index] == 'FILE':

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from . import Section
from io import BytesIO
from .Ffs import Ffs
@ -70,7 +71,7 @@ class FvImageSection(FvImageSectionClassObject):
# PI FvHeader is 0x48 byte
FvHeaderBuffer = FvFileObj.read(0x48)
# FV alignment position.
FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F)
FvAlignmentValue = 1 << (ord (FvHeaderBuffer[0x2E]) & 0x1F)
FvFileObj.close()
if FvAlignmentValue > MaxFvAlignment:
MaxFvAlignment = FvAlignmentValue
@ -86,9 +87,9 @@ class FvImageSection(FvImageSectionClassObject):
if MaxFvAlignment >= 0x1000000:
self.Alignment = "16M"
else:
self.Alignment = str(MaxFvAlignment // 0x100000) + "M"
self.Alignment = str(MaxFvAlignment / 0x100000) + "M"
else:
self.Alignment = str (MaxFvAlignment // 0x400) + "K"
self.Alignment = str (MaxFvAlignment / 0x400) + "K"
else:
# MaxFvAlignment is less than 1K
self.Alignment = str (MaxFvAlignment)
@ -98,12 +99,10 @@ class FvImageSection(FvImageSectionClassObject):
# Generate Fv
#
if self.FvName is not None:
Buffer = BytesIO()
Buffer = BytesIO('')
Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName)
if Fv is not None:
self.Fv = Fv
if not self.FvAddr and self.Fv.BaseAddress:
self.FvAddr = self.Fv.BaseAddress
FvFileName = Fv.AddToBuffer(Buffer, self.FvAddr, MacroDict = Dict, Flag=IsMakefile)
if Fv.FvAlignment is not None:
if self.Alignment is None:
@ -120,7 +119,7 @@ class FvImageSection(FvImageSectionClassObject):
# PI FvHeader is 0x48 byte
FvHeaderBuffer = FvFileObj.read(0x48)
# FV alignment position.
FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F)
FvAlignmentValue = 1 << (ord (FvHeaderBuffer[0x2E]) & 0x1F)
# FvAlignmentValue is larger than or equal to 1K
if FvAlignmentValue >= 0x400:
if FvAlignmentValue >= 0x100000:
@ -128,9 +127,9 @@ class FvImageSection(FvImageSectionClassObject):
if FvAlignmentValue >= 0x1000000:
self.Alignment = "16M"
else:
self.Alignment = str(FvAlignmentValue // 0x100000) + "M"
self.Alignment = str(FvAlignmentValue / 0x100000) + "M"
else:
self.Alignment = str (FvAlignmentValue // 0x400) + "K"
self.Alignment = str (FvAlignmentValue / 0x400) + "K"
else:
# FvAlignmentValue is less than 1K
self.Alignment = str (FvAlignmentValue)

View File

@ -15,6 +15,8 @@
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
from optparse import OptionParser
import sys
import Common.LongFilePathOs as os
@ -27,7 +29,6 @@ from Workspace.BuildClassObject import PcdClassObject
from . import RuleComplexFile
from .EfiSection import EfiSection
from io import BytesIO
from io import StringIO
import Common.TargetTxtClassObject as TargetTxtClassObject
import Common.ToolDefClassObject as ToolDefClassObject
from Common.DataType import *
@ -321,8 +322,6 @@ def main():
continue
for RegionData in RegionObj.RegionDataList:
if FvObj.UiFvName.upper() == RegionData.upper():
if not FvObj.BaseAddress:
FvObj.BaseAddress = '0x%x' % (int(FdObj.BaseAddress, 0) + RegionObj.Offset)
if FvObj.FvRegionInFD:
if FvObj.FvRegionInFD != RegionObj.Size:
EdkLogger.error("GenFds", FORMAT_INVALID, "The FV %s's region is specified in multiple FD with different value." %FvObj.UiFvName)
@ -457,7 +456,7 @@ class GenFds :
return
elif GenFds.OnlyGenerateThisFv is None:
for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict.values():
Buffer = BytesIO()
Buffer = BytesIO('')
FvObj.AddToBuffer(Buffer)
Buffer.close()
@ -560,9 +559,9 @@ class GenFds :
GenFdsGlobalVariable.InfLogger('\nFV Space Information')
for FvSpaceInfo in FvSpaceInfoList:
Name = FvSpaceInfo[0]
TotalSizeValue = int(FvSpaceInfo[1], 0)
UsedSizeValue = int(FvSpaceInfo[2], 0)
FreeSizeValue = int(FvSpaceInfo[3], 0)
TotalSizeValue = long(FvSpaceInfo[1], 0)
UsedSizeValue = long(FvSpaceInfo[2], 0)
FreeSizeValue = long(FvSpaceInfo[3], 0)
if UsedSizeValue == TotalSizeValue:
Percentage = '100'
else:
@ -588,7 +587,7 @@ class GenFds :
if PcdValue == '':
return
Int64PcdValue = int(PcdValue, 0)
Int64PcdValue = long(PcdValue, 0)
if Int64PcdValue == 0 or Int64PcdValue < -1:
return
@ -603,27 +602,22 @@ class GenFds :
def GenerateGuidXRefFile(BuildDb, ArchList, FdfParserObj):
GuidXRefFileName = os.path.join(GenFdsGlobalVariable.FvDir, "Guid.xref")
GuidXRefFile = StringIO('')
GuidXRefFile = BytesIO('')
PkgGuidDict = {}
GuidDict = {}
ModuleList = []
FileGuidList = []
GuidPattern = gGuidPattern
VariableGuidSet = set()
for Arch in ArchList:
PlatformDataBase = BuildDb.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
PkgList = GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag)
for P in PkgList:
PkgGuidDict.update(P.Guids)
for Name, Guid in sorted(PlatformDataBase.Pcds):
for Name, Guid in PlatformDataBase.Pcds:
Pcd = PlatformDataBase.Pcds[Name, Guid]
if Pcd.Type in [TAB_PCDS_DYNAMIC_HII, TAB_PCDS_DYNAMIC_EX_HII]:
for SkuId in Pcd.SkuInfoList:
Sku = Pcd.SkuInfoList[SkuId]
if Sku.VariableGuid in VariableGuidSet:
continue
else:
VariableGuidSet.add(Sku.VariableGuid)
if Sku.VariableGuid and Sku.VariableGuid in PkgGuidDict.keys():
GuidDict[Sku.VariableGuid] = PkgGuidDict[Sku.VariableGuid]
for ModuleFile in PlatformDataBase.Modules:
@ -691,7 +685,7 @@ class GenFds :
F.read()
length = F.tell()
F.seek(4)
TmpStr = unpack('%dh' % ((length - 4) // 2), F.read())
TmpStr = unpack('%dh' % ((length - 4) / 2), F.read())
Name = ''.join(chr(c) for c in TmpStr[:-1])
else:
FileList = []

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import print_function
import Common.LongFilePathOs as os
import sys
import subprocess
@ -720,8 +721,8 @@ class GenFdsGlobalVariable:
return
if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1:
GenFdsGlobalVariable.InfLogger ("Return Value = %d" % PopenObject.returncode)
GenFdsGlobalVariable.InfLogger (out.decode(encoding='utf-8',errors='ignore'))
GenFdsGlobalVariable.InfLogger (error.decode(encoding='utf-8', errors='ignore'))
GenFdsGlobalVariable.InfLogger (out)
GenFdsGlobalVariable.InfLogger (error)
if PopenObject.returncode != 0:
print("###", cmd)
EdkLogger.error("GenFds", COMMAND_FAILURE, errorMess)

View File

@ -16,6 +16,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from . import Section
import subprocess
from .Ffs import Ffs

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
from .GenFdsGlobalVariable import GenFdsGlobalVariable

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from . import RuleSimpleFile
from . import RuleComplexFile
from . import Section

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
import subprocess

View File

@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from struct import *
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from io import BytesIO
@ -57,8 +58,8 @@ class Region(RegionClassObject):
PadByte = pack('B', 0xFF)
else:
PadByte = pack('B', 0)
for i in range(0, Size):
Buffer.write(PadByte)
PadData = ''.join(PadByte for i in xrange(0, Size))
Buffer.write(PadData)
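# For instance (hypothetical Size): Size = 4 with the 0xFF pad byte selected
# yields PadData = '\xff\xff\xff\xff', written in one call rather than in a
# per-byte loop.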
## AddToBuffer()
#
@ -127,7 +128,7 @@ class Region(RegionClassObject):
if self.FvAddress % FvAlignValue != 0:
EdkLogger.error("GenFds", GENFDS_ERROR,
"FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
FvBuffer = BytesIO()
FvBuffer = BytesIO('')
FvBaseAddress = '0x%X' % self.FvAddress
BlockSize = None
BlockNum = None
@ -296,7 +297,7 @@ class Region(RegionClassObject):
else:
# region ended within current blocks
if self.Offset + self.Size <= End:
ExpectedList.append((BlockSize, (RemindingSize + BlockSize - 1) // BlockSize))
ExpectedList.append((BlockSize, (RemindingSize + BlockSize - 1) / BlockSize))
break
# region not ended yet
else:
@ -305,7 +306,7 @@ class Region(RegionClassObject):
UsedBlockNum = BlockNum
# region started in middle of current blocks
else:
UsedBlockNum = (End - self.Offset) // BlockSize
UsedBlockNum = (End - self.Offset) / BlockSize
Start = End
ExpectedList.append((BlockSize, UsedBlockNum))
RemindingSize -= BlockSize * UsedBlockNum
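(RemindingSize + BlockSize - 1) / BlockSize is the classic integer ceiling division; the // spelling keeps it integral on Python 3. A quick check of the idiom:

def blocks_needed(size, block_size):
    # Round up without touching floats: ceil(size / block_size) for positive ints.
    return (size + block_size - 1) // block_size

assert blocks_needed(1, 0x1000) == 1
assert blocks_needed(0x1000, 0x1000) == 1
assert blocks_needed(0x1001, 0x1000) == 2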


@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from . import Rule
from CommonDataClass.FdfClass import RuleComplexFileClassObject


@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from . import Rule
from CommonDataClass.FdfClass import RuleSimpleFileClassObject


@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from CommonDataClass.FdfClass import SectionClassObject
from .GenFdsGlobalVariable import GenFdsGlobalVariable
import Common.LongFilePathOs as os, glob


@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from . import Section
from .Ffs import Ffs
import subprocess


@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from .Ffs import Ffs
from . import Section
import Common.LongFilePathOs as os


@ -15,6 +15,7 @@
##
# Import Modules
#
from __future__ import absolute_import
from .GenFdsGlobalVariable import GenFdsGlobalVariable
import Common.LongFilePathOs as os
from CommonDataClass.FdfClass import VtfClassObject


@ -17,6 +17,7 @@
#
#====================================== External Libraries ========================================
from __future__ import print_function
import optparse
import Common.LongFilePathOs as os
import re


@ -133,7 +133,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
#
for Index in range(ValueLength):
ByteList[ValueOffset + Index] = ValueNumber % 0x100
ValueNumber = ValueNumber // 0x100
ValueNumber = ValueNumber / 0x100
elif TypeName == TAB_VOID:
ValueString = SavedStr
if ValueString.startswith('L"'):
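The % 0x100 and // 0x100 pair above splits an integer into little-endian bytes, one per loop pass; // keeps the running value an int on Python 3. A self-contained sketch of the same decomposition:

def to_le_bytes(value, length):
    # Emit 'length' little-endian bytes, mirroring the patch loop above.
    out = bytearray(length)
    for index in range(length):
        out[index] = value % 0x100
        value = value // 0x100     # plain '/' would turn value into a float on Python 3
    return out

assert to_le_bytes(0x12345678, 4) == bytearray(b'\x78\x56\x34\x12')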


@ -19,6 +19,7 @@
'''
Pkcs7Sign
'''
from __future__ import print_function
import os
import sys
@ -75,9 +76,8 @@ if __name__ == '__main__':
#
# Create command line argument parser object
#
parser = argparse.ArgumentParser(prog=__prog__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
parser = argparse.ArgumentParser(prog=__prog__, version=__version__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("--version", action='version', version=__version__)
group.add_argument("-e", action="store_true", dest='Encode', help='encode file')
group.add_argument("-d", action="store_true", dest='Decode', help='decode file')
parser.add_argument("-o", "--output", dest='OutputFile', type=str, metavar='filename', help="specify the output filename", required=True)
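ArgumentParser(version=...) only exists in Python 2's argparse; Python 3 removed the keyword in favor of an explicit version action, which is what the reverted code used. A minimal sketch of the portable form (prog name and version string illustrative):

import argparse

parser = argparse.ArgumentParser(prog='demo')
parser.add_argument('--version', action='version', version='0.9')
# 'demo --version' now prints 0.9 and exits, on Python 2.7 and 3 alike.
args = parser.parse_args([])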
@ -121,7 +121,7 @@ if __name__ == '__main__':
if Process.returncode != 0:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(Process.returncode)
print(Version[0].decode())
print(Version[0])
#
# Read input file into a buffer and save input filename
@ -141,11 +141,11 @@ if __name__ == '__main__':
try:
if args.MonotonicCountStr.upper().startswith('0X'):
args.MonotonicCountValue = (int)(args.MonotonicCountStr, 16)
args.MonotonicCountValue = (long)(args.MonotonicCountStr, 16)
else:
args.MonotonicCountValue = (int)(args.MonotonicCountStr)
args.MonotonicCountValue = (long)(args.MonotonicCountStr)
except:
args.MonotonicCountValue = (int)(0)
args.MonotonicCountValue = (long)(0)
if args.Encode:
#
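long() returns here because Python 2 distinguishes int from long; Python 3 has a single arbitrary-precision int, so int() alone is the forward-compatible call. A sketch of the parse being reverted:

def parse_monotonic_count(text):
    # Accept both 0x-prefixed hex and plain decimal; int() on Python 3
    # already covers the range Python 2 needed long() for.
    if text.upper().startswith('0X'):
        return int(text, 16)
    return int(text)

assert parse_monotonic_count('0x10') == 16
assert parse_monotonic_count('42') == 42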
@ -251,9 +251,9 @@ if __name__ == '__main__':
sys.exit(1)
else:
if args.SignatureSizeStr.upper().startswith('0X'):
SignatureSize = (int)(args.SignatureSizeStr, 16)
SignatureSize = (long)(args.SignatureSizeStr, 16)
else:
SignatureSize = (int)(args.SignatureSizeStr)
SignatureSize = (long)(args.SignatureSizeStr)
if SignatureSize < 0:
print("ERROR: The value of option --signature-size can't be set to negative value!")
sys.exit(1)


@ -22,6 +22,7 @@
'''
Rsa2048Sha256GenerateKeys
'''
from __future__ import print_function
import os
import sys
@ -42,9 +43,8 @@ if __name__ == '__main__':
#
# Create command line argument parser object
#
parser = argparse.ArgumentParser(prog=__prog__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
parser = argparse.ArgumentParser(prog=__prog__, version=__version__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("--version", action='version', version=__version__)
group.add_argument("-o", "--output", dest='OutputFile', type=argparse.FileType('wb'), metavar='filename', nargs='*', help="specify the output private key filename in PEM format")
group.add_argument("-i", "--input", dest='InputFile', type=argparse.FileType('rb'), metavar='filename', nargs='*', help="specify the input private key filename in PEM format")
parser.add_argument("--public-key-hash", dest='PublicKeyHashFile', type=argparse.FileType('wb'), help="specify the public key hash filename that is SHA 256 hash of 2048 bit RSA public key in binary format")
@ -83,7 +83,7 @@ if __name__ == '__main__':
if Process.returncode != 0:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(Process.returncode)
print(Version[0].decode())
print(Version[0])
args.PemFileName = []
@ -118,19 +118,19 @@ if __name__ == '__main__':
args.PemFileName.append(Item.name)
Item.close()
PublicKeyHash = bytearray()
PublicKeyHash = ''
for Item in args.PemFileName:
#
# Extract public key from private key into STDOUT
#
Process = subprocess.Popen('%s rsa -in %s -modulus -noout' % (OpenSslCommand, Item), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()
PublicKeyHexString = Process.communicate()[0].split('=')[1].strip()
if Process.returncode != 0:
print('ERROR: Unable to extract public key from private key')
sys.exit(Process.returncode)
PublicKey = bytearray()
PublicKey = ''
for Index in range (0, len(PublicKeyHexString), 2):
PublicKey = PublicKey + PublicKeyHexString[Index:Index + 2]
PublicKey = PublicKey + chr(int(PublicKeyHexString[Index:Index + 2], 16))
#
# Generate SHA 256 hash of RSA 2048 bit public key into STDOUT
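The loop above turns the hex modulus printed by openssl into raw key bytes; on Python 2 that needs chr() on a str, while on Python 3 the hex text converts in one call. A sketch with a stand-in modulus:

hex_string = '0123ABCD'          # stand-in for the 'Modulus=' line from openssl rsa -modulus

raw = bytes.fromhex(hex_string)  # Python 3 one-liner
raw2 = b''.join(bytes([int(hex_string[i:i + 2], 16)])
                for i in range(0, len(hex_string), 2))   # the loop form, byte by byte
assert raw == raw2 == b'\x01\x23\xab\xcd'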
@ -156,14 +156,14 @@ if __name__ == '__main__':
#
PublicKeyHashC = '{'
for Item in PublicKeyHash:
PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (Item)
PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (ord(Item))
PublicKeyHashC = PublicKeyHashC[:-2] + '}'
#
# Write SHA 256 of 2048 bit binary public key to public key hash C structure file
#
try:
args.PublicKeyHashCFile.write (bytes(PublicKeyHashC))
args.PublicKeyHashCFile.write (PublicKeyHashC)
args.PublicKeyHashCFile.close ()
except:
pass
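The ord() comes back because the Python 2 code accumulates the digest in a str, whose items are one-character strings; Python 3's bytes/bytearray yield ints directly. Formatting a digest as a C initializer, sketched in Python 3 terms:

digest = bytearray(b'\xde\xad\xbe\xef')   # stand-in for the SHA-256 public key hash

c_struct = '{' + ', '.join('0x%02x' % b for b in digest) + '}'
assert c_struct == '{0xde, 0xad, 0xbe, 0xef}'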


@ -17,6 +17,7 @@
'''
Rsa2048Sha256Sign
'''
from __future__ import print_function
import os
import sys
@ -61,9 +62,8 @@ if __name__ == '__main__':
#
# Create command line argument parser object
#
parser = argparse.ArgumentParser(prog=__prog__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
parser = argparse.ArgumentParser(prog=__prog__, version=__version__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("--version", action='version', version=__version__)
group.add_argument("-e", action="store_true", dest='Encode', help='encode file')
group.add_argument("-d", action="store_true", dest='Decode', help='decode file')
parser.add_argument("-o", "--output", dest='OutputFile', type=str, metavar='filename', help="specify the output filename", required=True)
@ -104,7 +104,7 @@ if __name__ == '__main__':
if Process.returncode != 0:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(Process.returncode)
print(Version[0].decode())
print(Version[0])
#
# Read input file into a buffer and save input filename
@ -152,11 +152,10 @@ if __name__ == '__main__':
# Extract public key from private key into STDOUT
#
Process = subprocess.Popen('%s rsa -in "%s" -modulus -noout' % (OpenSslCommand, args.PrivateKeyFileName), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()
PublicKeyHexString = PublicKeyHexString.decode(encoding='utf-8')
PublicKeyHexString = Process.communicate()[0].split('=')[1].strip()
PublicKey = ''
while len(PublicKeyHexString) > 0:
PublicKey = PublicKey + PublicKeyHexString[0:2]
PublicKey = PublicKey + chr(int(PublicKeyHexString[0:2], 16))
PublicKeyHexString=PublicKeyHexString[2:]
if Process.returncode != 0:
sys.exit(Process.returncode)
@ -164,9 +163,9 @@ if __name__ == '__main__':
if args.MonotonicCountStr:
try:
if args.MonotonicCountStr.upper().startswith('0X'):
args.MonotonicCountValue = (int)(args.MonotonicCountStr, 16)
args.MonotonicCountValue = (long)(args.MonotonicCountStr, 16)
else:
args.MonotonicCountValue = (int)(args.MonotonicCountStr)
args.MonotonicCountValue = (long)(args.MonotonicCountStr)
except:
pass
@ -187,8 +186,8 @@ if __name__ == '__main__':
# Write output file that contains hash GUID, Public Key, Signature, and Input data
#
args.OutputFile = open(args.OutputFileName, 'wb')
args.OutputFile.write(EFI_HASH_ALGORITHM_SHA256_GUID.bytes_le)
args.OutputFile.write(bytearray.fromhex(PublicKey))
args.OutputFile.write(EFI_HASH_ALGORITHM_SHA256_GUID.get_bytes_le())
args.OutputFile.write(PublicKey)
args.OutputFile.write(Signature)
args.OutputFile.write(args.InputFileBuffer)
args.OutputFile.close()
@ -210,7 +209,7 @@ if __name__ == '__main__':
#
# Verify the public key
#
if Header.PublicKey != bytearray.fromhex(PublicKey):
if Header.PublicKey != PublicKey:
print('ERROR: Public key in input file does not match public key from private key file')
sys.exit(1)
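bytes_le is the standard uuid attribute on Python 3 (get_bytes_le is the accessor the restored code relied on); it emits the mixed-endian layout EFI stores GUIDs in. A sketch, with the SHA-256 hash algorithm GUID value quoted from MdePkg for illustration only:

import uuid

EFI_HASH_ALGORITHM_SHA256_GUID = uuid.UUID('{51aa59de-fdf2-4ea3-bc63-875fb7842ee9}')

# First three fields little-endian, trailing eight bytes verbatim.
print(EFI_HASH_ALGORITHM_SHA256_GUID.bytes_le.hex())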


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
import CommonDataClass.DataClass as DataClass
from Table.Table import Table


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
import CommonDataClass.DataClass as DataClass
from Table.Table import Table


@ -1,3 +1,4 @@
from __future__ import absolute_import
## @file
# This file is used to create/update/query/erase table for dsc datas
#


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
import Common.LongFilePathOs as os, time
from Table.Table import Table


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
import CommonDataClass.DataClass as DataClass
from Table.Table import Table


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Common.StringUtils import ConvertToSqlString
from Table.Table import Table


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
import CommonDataClass.DataClass as DataClass
from Table.Table import Table


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Common.StringUtils import ConvertToSqlString
from Table.Table import Table


@ -14,6 +14,7 @@
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
import Common.LongFilePathOs as os, time
from Table.Table import Table


@ -12,6 +12,7 @@
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
from __future__ import print_function
import Common.LongFilePathOs as os
import sys
import traceback


@ -245,7 +245,7 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
# save to file
try:
f = open (Target, 'w')
f = open (Target, 'wb')
except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
f.writelines(NewLines)
@ -458,7 +458,7 @@ def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" %OutputFile, None)
# Use a instance of BytesIO to cache data
fStringIO = BytesIO()
fStringIO = BytesIO('')
for Item in VfrUniOffsetList:
if (Item[0].find("Strings") != -1):
@ -468,7 +468,8 @@ def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
#
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
fStringIO.write(bytes(UniGuid))
UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
fStringIO.write(''.join(UniGuid))
UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue)
else:
@ -478,7 +479,9 @@ def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
#
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
fStringIO.write(bytes(VfrGuid))
VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
fStringIO.write(''.join(VfrGuid))
type (Item[1])
VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue)
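bytes() over a list of ints is the Python 3 way to materialize the GUID above; the restored Python 2 code joins chr() results instead. A sketch of the record being assembled, with an illustrative offset:

from io import BytesIO
from struct import pack

VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49,
           0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]

stream = BytesIO()
stream.write(bytes(VfrGuid))              # the 16 GUID bytes
stream.write(pack('Q', int('0x40', 16)))  # 64-bit offset, as in the hunk
assert len(stream.getvalue()) == 24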
@ -559,7 +562,7 @@ def TrimEdkSourceCode(Source, Target):
CreateDirectory(os.path.dirname(Target))
try:
f = open (Source, 'r')
f = open (Source, 'rb')
except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
# read whole file
@ -578,7 +581,7 @@ def TrimEdkSourceCode(Source, Target):
return
try:
f = open (Target, 'w')
f = open (Target, 'wb')
except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
f.write(NewLines)


@ -66,7 +66,7 @@ class RecoverMgr:
arc = self._tryhook(path)
if arc and os.path.isfile(path):
self._createzip()
self.zip.write(path, arc)
self.zip.write(path, arc.encode('utf_8'))
sta = os.stat(path)
oldmode = stat.S_IMODE(sta.st_mode)
self.rlist.append(_PathInfo(_CHMOD, path, oldmode))


@ -44,7 +44,7 @@ class IpiDatabase(object):
Dir = os.path.dirname(DbPath)
if not os.path.isdir(Dir):
os.mkdir(Dir)
self.Conn = sqlite3.connect(DbPath, isolation_level='DEFERRED')
self.Conn = sqlite3.connect(unicode(DbPath), isolation_level='DEFERRED')
self.Conn.execute("PRAGMA page_size=4096")
self.Conn.execute("PRAGMA synchronous=OFF")
self.Cur = self.Conn.cursor()


@ -585,17 +585,17 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
ContainerFile = GetUniFileName(os.path.dirname(PackageObject.GetFullPath()), PackageObject.GetBaseName())
Content = UniFileHeader + '\n'
Content += '\n'
Content = UniFileHeader + '\r\n'
Content += '\r\n'
Content += FormatUniEntry('#string ' + TAB_DEC_PACKAGE_ABSTRACT, PackageObject.GetAbstract(), ContainerFile) + '\n'
Content += FormatUniEntry('#string ' + TAB_DEC_PACKAGE_ABSTRACT, PackageObject.GetAbstract(), ContainerFile) + '\r\n'
Content += FormatUniEntry('#string ' + TAB_DEC_PACKAGE_DESCRIPTION, PackageObject.GetDescription(), ContainerFile) \
+ '\n'
+ '\r\n'
Content += FormatUniEntry('#string ' + TAB_DEC_BINARY_ABSTRACT, BinaryAbstract, ContainerFile) + '\n'
Content += FormatUniEntry('#string ' + TAB_DEC_BINARY_ABSTRACT, BinaryAbstract, ContainerFile) + '\r\n'
Content += FormatUniEntry('#string ' + TAB_DEC_BINARY_DESCRIPTION, BinaryDescription, ContainerFile) + '\n'
Content += FormatUniEntry('#string ' + TAB_DEC_BINARY_DESCRIPTION, BinaryDescription, ContainerFile) + '\r\n'
PromptGenList = []
HelpTextGenList = []
@ -612,7 +612,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
if (PcdPromptStrName, Lang) not in PromptGenList:
TokenValueList.append((Lang, PromptStr))
PromptGenList.append((PcdPromptStrName, Lang))
PromptString = FormatUniEntry(PcdPromptStrName, TokenValueList, ContainerFile) + '\n'
PromptString = FormatUniEntry(PcdPromptStrName, TokenValueList, ContainerFile) + '\r\n'
if PromptString not in Content:
Content += PromptString
@ -628,7 +628,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
if (PcdHelpStrName, Lang) not in HelpTextGenList:
TokenValueList.append((Lang, HelpStr))
HelpTextGenList.append((PcdHelpStrName, Lang))
HelpTextString = FormatUniEntry(PcdHelpStrName, TokenValueList, ContainerFile) + '\n'
HelpTextString = FormatUniEntry(PcdHelpStrName, TokenValueList, ContainerFile) + '\r\n'
if HelpTextString not in Content:
Content += HelpTextString
@ -639,7 +639,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
PcdErrStrName = '#string ' + TAB_STR_TOKENCNAME + TAB_UNDERLINE_SPLIT + Pcd.GetTokenSpaceGuidCName() \
+ TAB_UNDERLINE_SPLIT + TAB_STR_TOKENERR \
+ TAB_UNDERLINE_SPLIT + ErrorNo[2:]
PcdErrString = FormatUniEntry(PcdErrStrName, PcdError.GetErrorMessageList(), ContainerFile) + '\n'
PcdErrString = FormatUniEntry(PcdErrStrName, PcdError.GetErrorMessageList(), ContainerFile) + '\r\n'
if PcdErrString not in Content:
Content += PcdErrString
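The UNI content goes through codecs.open, whose streams perform no newline translation, so the restored Python 2 code spells out CRLF explicitly rather than relying on a later '\n' conversion. A sketch of the write, with a hypothetical file name:

import codecs

Content = u'#string STR_PACKAGE_ABSTRACT  #language en-US "Demo package"\r\n'
with codecs.open('Demo.uni', 'w', 'utf_16') as f:
    f.write(Content)   # written as-is: codecs streams do not rewrite line endings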


@ -234,22 +234,22 @@ def GenModuleUNIEncodeFile(ModuleObject, UniFileHeader='', Encoding=DT.TAB_ENCOD
if not os.path.exists(os.path.dirname(ModuleObject.GetFullPath())):
os.makedirs(os.path.dirname(ModuleObject.GetFullPath()))
Content = UniFileHeader + '\n'
Content += '\n'
Content = UniFileHeader + '\r\n'
Content += '\r\n'
Content += FormatUniEntry('#string ' + DT.TAB_INF_ABSTRACT, ModuleObject.GetAbstract(), ContainerFile) + '\n'
Content += FormatUniEntry('#string ' + DT.TAB_INF_ABSTRACT, ModuleObject.GetAbstract(), ContainerFile) + '\r\n'
Content += FormatUniEntry('#string ' + DT.TAB_INF_DESCRIPTION, ModuleObject.GetDescription(), ContainerFile) \
+ '\n'
+ '\r\n'
BinaryAbstractString = FormatUniEntry('#string ' + DT.TAB_INF_BINARY_ABSTRACT, BinaryAbstract, ContainerFile)
if BinaryAbstractString:
Content += BinaryAbstractString + '\n'
Content += BinaryAbstractString + '\r\n'
BinaryDescriptionString = FormatUniEntry('#string ' + DT.TAB_INF_BINARY_DESCRIPTION, BinaryDescription, \
ContainerFile)
if BinaryDescriptionString:
Content += BinaryDescriptionString + '\n'
Content += BinaryDescriptionString + '\r\n'
if not os.path.exists(ContainerFile):
File = codecs.open(ContainerFile, 'wb', Encoding)
@ -274,7 +274,7 @@ def GenDefines(ModuleObject):
if not DefinesDict:
continue
for Statement in DefinesDict:
if len(Statement.split(DT.TAB_EQUAL_SPLIT)) > 1:
if Statement.split(DT.TAB_EQUAL_SPLIT) > 1:
Statement = (u'%s ' % Statement.split(DT.TAB_EQUAL_SPLIT, 1)[0]).ljust(LeftOffset) \
+ u'= %s' % Statement.split(DT.TAB_EQUAL_SPLIT, 1)[1].lstrip()
SortedArch = DT.TAB_ARCH_COMMON
@ -409,7 +409,7 @@ def GenLibraryClasses(ModuleObject):
Statement += '|' + FFE
ModuleList = LibraryClass.GetSupModuleList()
ArchList = LibraryClass.GetSupArchList()
for Index in range(0, len(ArchList)):
for Index in xrange(0, len(ArchList)):
ArchList[Index] = ConvertArchForInstall(ArchList[Index])
ArchList.sort()
SortedArch = ' '.join(ArchList)
@ -572,7 +572,7 @@ def GenUserExtensions(ModuleObject):
# if not Statement:
# continue
ArchList = UserExtension.GetSupArchList()
for Index in range(0, len(ArchList)):
for Index in xrange(0, len(ArchList)):
ArchList[Index] = ConvertArchForInstall(ArchList[Index])
ArchList.sort()
KeyList = []


@ -124,46 +124,50 @@ def GenHeaderCommentSection(Abstract, Description, Copyright, License, IsBinaryH
#
# Convert special character to (c), (r) and (tm).
#
if isinstance(Abstract, unicode):
Abstract = ConvertSpecialUnicodes(Abstract)
if isinstance(Description, unicode):
Description = ConvertSpecialUnicodes(Description)
if IsBinaryHeader:
Content += CommChar * 2 + TAB_SPACE_SPLIT + TAB_BINARY_HEADER_COMMENT + '\n'
Content += CommChar * 2 + TAB_SPACE_SPLIT + TAB_BINARY_HEADER_COMMENT + '\r\n'
elif CommChar == TAB_COMMENT_EDK1_SPLIT:
Content += CommChar + TAB_SPACE_SPLIT + TAB_COMMENT_EDK1_START + TAB_STAR + TAB_SPACE_SPLIT +\
TAB_HEADER_COMMENT + '\n'
TAB_HEADER_COMMENT + '\r\n'
else:
Content += CommChar * 2 + TAB_SPACE_SPLIT + TAB_HEADER_COMMENT + '\n'
Content += CommChar * 2 + TAB_SPACE_SPLIT + TAB_HEADER_COMMENT + '\r\n'
if Abstract:
Abstract = Abstract.rstrip('\n')
Content += CommChar + TAB_SPACE_SPLIT + ('\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
Abstract = Abstract.rstrip('\r\n')
Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
(Abstract, '\n'))
Content += '\n' + CommChar + '\n'
Content += '\r\n' + CommChar + '\r\n'
else:
Content += CommChar + '\n'
Content += CommChar + '\r\n'
if Description:
Description = Description.rstrip('\n')
Content += CommChar + TAB_SPACE_SPLIT + ('\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
Description = Description.rstrip('\r\n')
Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
(Description, '\n'))
Content += '\n' + CommChar + '\n'
Content += '\r\n' + CommChar + '\r\n'
#
# There is no '#\n' line to separate multiple copyright lines in code base
#
if Copyright:
Copyright = Copyright.rstrip('\n')
Content += CommChar + TAB_SPACE_SPLIT + ('\n' + CommChar + TAB_SPACE_SPLIT).join\
Copyright = Copyright.rstrip('\r\n')
Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join\
(GetSplitValueList(Copyright, '\n'))
Content += '\n' + CommChar + '\n'
Content += '\r\n' + CommChar + '\r\n'
if License:
License = License.rstrip('\n')
Content += CommChar + TAB_SPACE_SPLIT + ('\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
License = License.rstrip('\r\n')
Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
(License, '\n'))
Content += '\n' + CommChar + '\n'
Content += '\r\n' + CommChar + '\r\n'
if CommChar == TAB_COMMENT_EDK1_SPLIT:
Content += CommChar + TAB_SPACE_SPLIT + TAB_STAR + TAB_COMMENT_EDK1_END + '\n'
Content += CommChar + TAB_SPACE_SPLIT + TAB_STAR + TAB_COMMENT_EDK1_END + '\r\n'
else:
Content += CommChar * 2 + '\n'
Content += CommChar * 2 + '\r\n'
return Content


@ -74,7 +74,7 @@ def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = Fal
# first find the last copyright line
#
Last = 0
for Index in range(len(CommentList)-1, 0, -1):
for Index in xrange(len(CommentList)-1, 0, -1):
Line = CommentList[Index][0]
if _IsCopyrightLine(Line):
Last = Index
@ -206,14 +206,17 @@ def ParsePcdErrorCode (Value = None, ContainerFile = None, LineNum = None):
Base = 16
else:
Base = 10
ErrorCode = int(Value, Base)
ErrorCode = long(Value, Base)
if ErrorCode > PCD_ERR_CODE_MAX_SIZE or ErrorCode < 0:
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
"The format %s of ErrorCode is not valid, should be UNIT32 type or long type" % Value,
File = ContainerFile,
Line = LineNum)
return hex(ErrorCode)
#
# To delete the tailing 'L'
#
return hex(ErrorCode)[:-1]
except ValueError as XStr:
if XStr:
pass
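The [:-1] slice exists only to drop the 'L' that Python 2's hex() appends to longs; Python 3 has a single int type and no suffix. A guard that works on both:

ErrorCode = int('0x5678', 16)
Text = hex(ErrorCode)
if Text.endswith('L'):    # never true on Python 3
    Text = Text[:-1]
assert Text == '0x5678'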


@ -14,6 +14,7 @@
'''
ExpressionValidate
'''
from __future__ import print_function
##
# Import Modules


@ -32,7 +32,7 @@ from os import linesep
from os import walk
from os import environ
import re
from collections import UserDict as IterableUserDict
from UserDict import IterableUserDict
import Logger.Log as Logger
from Logger import StringTable as ST
@ -160,23 +160,19 @@ def RemoveDirectory(Directory, Recursively=False):
# or not
#
def SaveFileOnChange(File, Content, IsBinaryFile=True):
if not IsBinaryFile:
Content = Content.replace("\n", linesep)
if os.path.exists(File):
try:
if isinstance(Content, bytes):
if Content == __FileHookOpen__(File, "rb").read():
return False
else:
if Content == __FileHookOpen__(File, "r").read():
return False
if Content == __FileHookOpen__(File, "rb").read():
return False
except BaseException:
Logger.Error(None, ToolError.FILE_OPEN_FAILURE, ExtraData=File)
CreateDirectory(os.path.dirname(File))
try:
if isinstance(Content, bytes):
FileFd = __FileHookOpen__(File, "wb")
else:
FileFd = __FileHookOpen__(File, "w")
FileFd = __FileHookOpen__(File, "wb")
FileFd.write(Content)
FileFd.close()
except BaseException:
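SaveFileOnChange compares before writing so unchanged outputs keep their timestamps; the restored version folds the text/bytes split into a single binary path. A minimal sketch of the compare-then-write pattern, assuming nothing about the __FileHookOpen__ wrapper:

import os

def save_file_on_change(path, content):
    mode = 'b' if isinstance(content, bytes) else ''
    if os.path.exists(path):
        with open(path, 'r' + mode) as f:
            if f.read() == content:
                return False           # identical content: leave the mtime alone
    with open(path, 'w' + mode) as f:
        f.write(content)
    return True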
@ -441,7 +437,7 @@ class Sdict(IterableUserDict):
def CommonPath(PathList):
Path1 = min(PathList).split(os.path.sep)
Path2 = max(PathList).split(os.path.sep)
for Index in range(min(len(Path1), len(Path2))):
for Index in xrange(min(len(Path1), len(Path2))):
if Path1[Index] != Path2[Index]:
return os.path.sep.join(Path1[:Index])
return os.path.sep.join(Path1)
@ -894,7 +890,7 @@ def ProcessEdkComment(LineList):
if FindEdkBlockComment:
if FirstPos == -1:
FirstPos = StartPos
for Index in range(StartPos, EndPos+1):
for Index in xrange(StartPos, EndPos+1):
LineList[Index] = ''
FindEdkBlockComment = False
elif Line.find("//") != -1 and not Line.startswith("#"):
@ -961,7 +957,7 @@ def GetLibInstanceInfo(String, WorkSpace, LineNo):
FileLinesList = []
try:
FInputfile = open(FullFileName, "r")
FInputfile = open(FullFileName, "rb", 0)
try:
FileLinesList = FInputfile.readlines()
except BaseException:


@ -727,7 +727,7 @@ def IsValidUserId(UserId):
#
def CheckUTF16FileHeader(File):
FileIn = open(File, 'rb').read(2)
if FileIn != b'\xff\xfe':
if FileIn != '\xff\xfe':
return False
return True
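The BOM comparison must be against bytes on Python 3 (b'\xff\xfe'), since read() on a binary file returns bytes there; on Python 2 the plain string literal compared equal. A version-neutral sketch:

import codecs

def check_utf16_file_header(path):
    with open(path, 'rb') as f:
        return f.read(2) == codecs.BOM_UTF16_LE   # b'\xff\xfe', the UTF-16LE BOM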


@ -16,6 +16,7 @@
'''
Parsing
'''
from __future__ import absolute_import
##
# Import Modules
@ -973,7 +974,7 @@ def GenSection(SectionName, SectionDict, SplitArch=True, NeedBlankLine=False):
ArchList = GetSplitValueList(SectionAttrs, DataType.TAB_COMMENT_SPLIT)
else:
ArchList = [SectionAttrs]
for Index in range(0, len(ArchList)):
for Index in xrange(0, len(ArchList)):
ArchList[Index] = ConvertArchForInstall(ArchList[Index])
Section = '[' + SectionName + '.' + (', ' + SectionName + '.').join(ArchList) + ']'
else:


@ -20,6 +20,7 @@ StringUtils
#
import re
import os.path
from string import strip
import Logger.Log as Logger
import Library.DataType as DataType
from Logger.ToolError import FORMAT_INVALID
@ -43,7 +44,7 @@ gMACRO_PATTERN = re.compile("\$\(([_A-Z][_A-Z0-9]*)\)", re.UNICODE)
#
#
def GetSplitValueList(String, SplitTag=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
return list(map(lambda l: l.strip(), String.split(SplitTag, MaxSplit)))
return map(lambda l: l.strip(), String.split(SplitTag, MaxSplit))
## MergeArches
#
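map() returns a lazy iterator on Python 3, so the list() wrapper preserves the len/indexing semantics callers of GetSplitValueList expect. A quick check:

def get_split_value_list(text, split_tag='|', max_split=-1):
    return list(map(lambda l: l.strip(), text.split(split_tag, max_split)))

assert get_split_value_list(' a | b |c ') == ['a', 'b', 'c']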
@ -434,7 +435,7 @@ def GetSingleValueOfKeyFromLines(Lines, Dictionary, CommentCharacter, KeySplitCh
#
LineList[1] = CleanString(LineList[1], CommentCharacter)
if ValueSplitFlag:
Value = map(lambda x: x.strip(), LineList[1].split(ValueSplitCharacter))
Value = map(strip, LineList[1].split(ValueSplitCharacter))
else:
Value = CleanString(LineList[1], CommentCharacter).splitlines()
@ -501,7 +502,7 @@ def PreCheck(FileName, FileContent, SupSectionTag):
#
# Regenerate FileContent
#
NewFileContent = NewFileContent + Line + '\n'
NewFileContent = NewFileContent + Line + '\r\n'
if IsFailed:
Logger.Error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=Logger.IS_RAISE_ERROR)
@ -679,7 +680,9 @@ def GetHelpTextList(HelpTextClassList):
# @param String: the source string
#
def StringArrayLength(String):
if String.startswith('L"'):
if isinstance(String, unicode):
return (len(String) + 1) * 2 + 1
elif String.startswith('L"'):
return (len(String) - 3 + 1) * 2
elif String.startswith('"'):
return (len(String) - 2 + 1)
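StringArrayLength sizes the C array a string literal occupies: L"..." strings are two bytes per character plus a two-byte terminator, plain "..." one byte per character plus NUL. The unicode branch returns because Python 2 distinguishes str from unicode. A check of the arithmetic, sketched without that branch:

def string_array_length(s):
    if s.startswith('L"'):
        return (len(s) - 3 + 1) * 2   # strip L and quotes, add the wide NUL
    if s.startswith('"'):
        return len(s) - 2 + 1         # strip quotes, add NUL
    return len(s) + 1

assert string_array_length('L"AB"') == 6   # 2 chars * 2 bytes + 2-byte NUL
assert string_array_length('"AB"') == 3    # 2 chars + NUL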
@ -937,14 +940,14 @@ def SplitPcdEntry(String):
def IsMatchArch(Arch1, Arch2):
if 'COMMON' in Arch1 or 'COMMON' in Arch2:
return True
if isinstance(Arch1, str) and isinstance(Arch2, str):
if isinstance(Arch1, basestring) and isinstance(Arch2, basestring):
if Arch1 == Arch2:
return True
if isinstance(Arch1, str) and isinstance(Arch2, list):
if isinstance(Arch1, basestring) and isinstance(Arch2, list):
return Arch1 in Arch2
if isinstance(Arch2, str) and isinstance(Arch1, list):
if isinstance(Arch2, basestring) and isinstance(Arch1, list):
return Arch2 in Arch1
if isinstance(Arch1, list) and isinstance(Arch2, list):
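basestring is the Python 2 common base of str and unicode; Python 3 keeps only str. A conventional shim, evaluated lazily so the Python 2-only name is never touched on Python 3:

import sys

string_types = (str, unicode) if sys.version_info[0] == 2 else (str,)  # noqa: F821

def is_arch_string(arch):
    return isinstance(arch, string_types)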

Some files were not shown because too many files have changed in this diff.