BaseTools: Clean some coding style issues

This patch cleans up some coding style issues, mainly related to space characters.

Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Yonghong Zhu <yonghong.zhu@intel.com>
Reviewed-by: Liming Gao <liming.gao@intel.com>

git-svn-id: https://svn.code.sf.net/p/edk2/code/trunk/edk2@19080 6f19259b-4bc3-4df7-8a09-765794883524
Author: Yonghong Zhu
Date: 2015-12-01 04:22:16 +00:00
Committed by: yzhu52
Commit: 47fea6afd7 (parent: 9913dce8ae)
28 changed files with 557 additions and 557 deletions
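The fixes are mechanical whitespace changes: a space after each comma, spaces around binary operators and the '%' string-formatting operator, and no spaces around '=' in keyword arguments or parameter defaults. The short Python sketch below is a hypothetical illustration of those conventions; the function and its arguments are made up, not code from BaseTools:

def log_build_option(name, value, verbose=False):    # no spaces around '=' in a parameter default
    # space after each comma, spaces around '%' when formatting strings
    line = "%-16s = %s" % (name, value)
    width = len(name) + 2                            # spaces around binary operators
    if verbose:
        print("%s (width %d)" % (line, width))
    return line

log_build_option("Toolchain", "GCC5", verbose=True)  # no spaces around '=' in a keyword argument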

@@ -293,7 +293,7 @@ class WorkspaceAutoGen(AutoGen):
 if self.ArchList:
 EdkLogger.info('%-16s = %s' % ("Architecture(s)", ' '.join(self.ArchList)))
 EdkLogger.info('%-16s = %s' % ("Build target", self.BuildTarget))
-EdkLogger.info('%-16s = %s' % ("Toolchain",self.ToolChain))
+EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))
 EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.Platform))
 if BuildModule:
@@ -438,7 +438,7 @@ class WorkspaceAutoGen(AutoGen):
 else:
 EdkLogger.error("build",
 FORMAT_INVALID,
-"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,
+"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
 FfsFile.CurrentLineContent,
 _GuidDict[Module.Guid.upper()].CurrentLineNum,
 _GuidDict[Module.Guid.upper()].CurrentLineContent,
@@ -464,7 +464,7 @@ class WorkspaceAutoGen(AutoGen):
 else:
 EdkLogger.error("build",
 FORMAT_INVALID,
-"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,
+"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
 FfsFile.CurrentLineContent,
 _GuidDict[InfObj.Guid.upper()].CurrentLineNum,
 _GuidDict[InfObj.Guid.upper()].CurrentLineContent,
@@ -506,7 +506,7 @@ class WorkspaceAutoGen(AutoGen):
 #
 EdkLogger.error("build",
 FORMAT_INVALID,
-"The format of PCD value is incorrect. PCD: %s , Value: %s\n"%(_PcdName, PcdItem.DefaultValue),
+"The format of PCD value is incorrect. PCD: %s , Value: %s\n" % (_PcdName, PcdItem.DefaultValue),
 ExtraData=self.FdfFile)
 if not _PcdGuidString.upper() in _GuidDict.keys():
@@ -516,7 +516,7 @@ class WorkspaceAutoGen(AutoGen):
 else:
 EdkLogger.error("build",
 FORMAT_INVALID,
-"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,
+"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
 FfsFile.CurrentLineContent,
 _GuidDict[_PcdGuidString.upper()].CurrentLineNum,
 _GuidDict[_PcdGuidString.upper()].CurrentLineContent,
@@ -531,7 +531,7 @@ class WorkspaceAutoGen(AutoGen):
 #
 EdkLogger.error("build",
 FORMAT_INVALID,
-"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,
+"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
 FfsFile.CurrentLineContent,
 _GuidDict[FfsFile.NameGuid.upper()].CurrentLineNum,
 _GuidDict[FfsFile.NameGuid.upper()].CurrentLineContent,
@@ -672,7 +672,7 @@ class WorkspaceAutoGen(AutoGen):
 #
 # Sort same token value PCD list with TokenGuid and TokenCName
 #
-SameTokenValuePcdList.sort(lambda x, y: cmp("%s.%s"%(x.TokenSpaceGuidCName, x.TokenCName), "%s.%s"%(y.TokenSpaceGuidCName, y.TokenCName)))
+SameTokenValuePcdList.sort(lambda x, y: cmp("%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName), "%s.%s" % (y.TokenSpaceGuidCName, y.TokenCName)))
 SameTokenValuePcdListCount = 0
 while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):
 TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]
@@ -691,7 +691,7 @@ class WorkspaceAutoGen(AutoGen):
 Count += 1
 PcdList = Package.Pcds.values()
-PcdList.sort(lambda x, y: cmp("%s.%s"%(x.TokenSpaceGuidCName, x.TokenCName), "%s.%s"%(y.TokenSpaceGuidCName, y.TokenCName)))
+PcdList.sort(lambda x, y: cmp("%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName), "%s.%s" % (y.TokenSpaceGuidCName, y.TokenCName)))
 Count = 0
 while (Count < len(PcdList) - 1) :
 Item = PcdList[Count]
@@ -961,7 +961,7 @@ class PlatformAutoGen(AutoGen):
 self.Platform.Modules[F].M = M
-for PcdFromModule in M.ModulePcdList+M.LibraryPcdList:
+for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:
 # make sure that the "VOID*" kind of datum has MaxDatumSize set
 if PcdFromModule.DatumType == "VOID*" and PcdFromModule.MaxDatumSize in [None, '']:
 NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, F))
@@ -1850,11 +1850,11 @@ class PlatformAutoGen(AutoGen):
 #
 def ApplyPcdSetting(self, Module, Pcds):
 # for each PCD in module
-for Name,Guid in Pcds:
-PcdInModule = Pcds[Name,Guid]
+for Name, Guid in Pcds:
+PcdInModule = Pcds[Name, Guid]
 # find out the PCD setting in platform
-if (Name,Guid) in self.Platform.Pcds:
-PcdInPlatform = self.Platform.Pcds[Name,Guid]
+if (Name, Guid) in self.Platform.Pcds:
+PcdInPlatform = self.Platform.Pcds[Name, Guid]
 else:
 PcdInPlatform = None
 # then override the settings if any
@@ -1940,7 +1940,7 @@ class PlatformAutoGen(AutoGen):
 if Attr == "*":
 PriorityValue &= 0x11110
-return self.PrioList["0x%0.5x"%PriorityValue]
+return self.PrioList["0x%0.5x" % PriorityValue]
 ## Expand * in build option key
@@ -2045,7 +2045,7 @@ class PlatformAutoGen(AutoGen):
 Family = Key[0]
 Target, Tag, Arch, Tool, Attr = Key[1].split("_")
 # if tool chain family doesn't match, skip it
-if Tool not in self.ToolDefinition or Family =="":
+if Tool not in self.ToolDefinition or Family == "":
 continue
 # option has been added before
 if Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_FAMILY]:
@@ -2637,9 +2637,9 @@ class ModuleAutoGen(AutoGen):
 # is the former use /I , the Latter used -I to specify include directories
 #
 if self.PlatformInfo.ToolChainFamily in ('MSFT'):
-gBuildOptIncludePattern = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE|re.DOTALL)
+gBuildOptIncludePattern = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
 elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
-gBuildOptIncludePattern = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE|re.DOTALL)
+gBuildOptIncludePattern = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
 else:
 #
 # New ToolChainFamily, don't known whether there is option to specify include directories
@@ -2676,8 +2676,8 @@ class ModuleAutoGen(AutoGen):
 ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
 EdkLogger.error("build",
 PARAMETER_INVALID,
-ExtraData = ErrMsg,
-File = str(self.MetaFile))
+ExtraData=ErrMsg,
+File=str(self.MetaFile))
 BuildOptionIncPathList += IncPathList
@@ -3292,7 +3292,7 @@ class ModuleAutoGen(AutoGen):
 'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
 'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
 'module_arch' : self.Arch,
-'package_item' : ['%s' % (Package.MetaFile.File.replace('\\','/')) for Package in Packages],
+'package_item' : ['%s' % (Package.MetaFile.File.replace('\\', '/')) for Package in Packages],
 'binary_item' : [],
 'patchablepcd_item' : [],
 'pcd_item' : [],
@@ -3316,10 +3316,10 @@ class ModuleAutoGen(AutoGen):
 if 'PI_SPECIFICATION_VERSION' in self.Specification:
 AsBuiltInfDict['module_pi_specification_version'] += [self.Specification['PI_SPECIFICATION_VERSION']]
-OutputDir = self.OutputDir.replace('\\','/').strip('/')
+OutputDir = self.OutputDir.replace('\\', '/').strip('/')
 if self.ModuleType in ['BASE', 'USER_DEFINED']:
 for Item in self.CodaTargetList:
-File = Item.Target.Path.replace('\\','/').strip('/').replace(OutputDir,'').strip('/')
+File = Item.Target.Path.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
 if Item.Target.Ext.lower() == '.aml':
 AsBuiltInfDict['binary_item'] += ['ASL|' + File]
 elif Item.Target.Ext.lower() == '.acpi':
@@ -3328,7 +3328,7 @@ class ModuleAutoGen(AutoGen):
 AsBuiltInfDict['binary_item'] += ['BIN|' + File]
 else:
 for Item in self.CodaTargetList:
-File = Item.Target.Path.replace('\\','/').strip('/').replace(OutputDir,'').strip('/')
+File = Item.Target.Path.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
 if Item.Target.Ext.lower() == '.efi':
 AsBuiltInfDict['binary_item'] += ['PE32|' + self.Name + '.efi']
 else:
@@ -3336,7 +3336,7 @@ class ModuleAutoGen(AutoGen):
 if self.DepexGenerated:
 if self.ModuleType in ['PEIM']:
 AsBuiltInfDict['binary_item'] += ['PEI_DEPEX|' + self.Name + '.depex']
-if self.ModuleType in ['DXE_DRIVER','DXE_RUNTIME_DRIVER','DXE_SAL_DRIVER','UEFI_DRIVER']:
+if self.ModuleType in ['DXE_DRIVER', 'DXE_RUNTIME_DRIVER', 'DXE_SAL_DRIVER', 'UEFI_DRIVER']:
 AsBuiltInfDict['binary_item'] += ['DXE_DEPEX|' + self.Name + '.depex']
 if self.ModuleType in ['DXE_SMM_DRIVER']:
 AsBuiltInfDict['binary_item'] += ['SMM_DEPEX|' + self.Name + '.depex']

@@ -388,7 +388,7 @@ class BuildRule:
 # find the build_rule_version
 if Line and Line[0] == "#" and Line.find(TAB_BUILD_RULE_VERSION) <> -1:
-if Line.find("=") <> -1 and Line.find("=") < (len(Line)-1) and (Line[(Line.find("=") + 1):]).split():
+if Line.find("=") <> -1 and Line.find("=") < (len(Line) - 1) and (Line[(Line.find("=") + 1):]).split():
 self._FileVersion = (Line[(Line.find("=") + 1):]).split()[0]
 # skip empty or comment line
 if Line == "" or Line[0] == "#":
@@ -470,16 +470,16 @@ class BuildRule:
 if TokenList[0] == "BUILD":
 if len(TokenList) == 1:
 EdkLogger.error("build", FORMAT_INVALID, "Invalid rule section",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 FileType = TokenList[1]
 if FileType == '':
 EdkLogger.error("build", FORMAT_INVALID, "No file type given",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 if self._FileTypePattern.match(FileType) == None:
-EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex+1,
+EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
 ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type")
 # new format: File-Type.Build-Type.Arch
 else:
@@ -488,7 +488,7 @@ class BuildRule:
 elif FileType != TokenList[0]:
 EdkLogger.error("build", FORMAT_INVALID,
 "Different file types are not allowed in the same rule section",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 if len(TokenList) > 1:
 BuildType = TokenList[1]
@@ -502,12 +502,12 @@ class BuildRule:
 if 'COMMON' in self._BuildTypeList and len(self._BuildTypeList) > 1:
 EdkLogger.error("build", FORMAT_INVALID,
 "Specific build types must not be mixed with common one",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 if 'COMMON' in self._ArchList and len(self._ArchList) > 1:
 EdkLogger.error("build", FORMAT_INVALID,
 "Specific ARCH must not be mixed with common one",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 self._FileType = FileType
@@ -531,7 +531,7 @@ class BuildRule:
 elif SectionType != Type:
 EdkLogger.error("build", FORMAT_INVALID,
 "Two different section types are not allowed in the same sub-section",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 if len(TokenList) > 1:
@@ -548,10 +548,10 @@ class BuildRule:
 if 'COMMON' in FamilyList and len(FamilyList) > 1:
 EdkLogger.error("build", FORMAT_INVALID,
 "Specific tool chain family should not be mixed with general one",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 if self._State not in self._StateHandler:
-EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex+1,
+EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
 ExtraData="Unknown subsection: %s" % self.RuleContent[LineIndex])
 ## Parse <InputFile> sub-section
 #

@@ -27,7 +27,7 @@ from BuildEngine import *
 import Common.GlobalData as GlobalData
 ## Regular expression for finding header file inclusions
-gIncludePattern = re.compile(r"^[ \t]*#?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE|re.UNICODE|re.IGNORECASE)
+gIncludePattern = re.compile(r"^[ \t]*#?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE | re.UNICODE | re.IGNORECASE)
 ## Regular expression for matching macro used in header file inclusion
 gMacroPattern = re.compile("([_A-Z][_A-Z0-9]*)[ \t]*\((.+)\)", re.UNICODE)
@@ -520,9 +520,9 @@ cleanlib:
 FileMacro = ""
 IncludePathList = []
 for P in self._AutoGenObject.IncludePathList:
-IncludePathList.append(IncPrefix+self.PlaceMacro(P, self.Macros))
+IncludePathList.append(IncPrefix + self.PlaceMacro(P, self.Macros))
 if FileBuildRule.INC_LIST_MACRO in self.ListFileMacros:
-self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix+P)
+self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix + P)
 FileMacro += self._FILE_MACRO_TEMPLATE.Replace(
 {
 "macro_name" : "INC",
@@ -533,7 +533,7 @@ cleanlib:
 # Generate macros used to represent files containing list of input files
 for ListFileMacro in self.ListFileMacros:
-ListFileName = os.path.join(self._AutoGenObject.OutputDir, "%s.lst" % ListFileMacro.lower()[:len(ListFileMacro)-5])
+ListFileName = os.path.join(self._AutoGenObject.OutputDir, "%s.lst" % ListFileMacro.lower()[:len(ListFileMacro) - 5])
 FileMacroList.append("%s = %s" % (ListFileMacro, ListFileName))
 SaveFileOnChange(
 ListFileName,
@@ -767,7 +767,7 @@ cleanlib:
 try:
 Fd = open(F.Path, 'r')
 except BaseException, X:
-EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path+"\n\t"+str(X))
+EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))
 FileContent = Fd.read()
 Fd.close()

@@ -784,11 +784,11 @@ def BuildExDataBase(Dict):
 DbTotal = [InitValueUint64, VardefValueUint64, InitValueUint32, VardefValueUint32, VpdHeadValue, ExMapTable,
 LocalTokenNumberTable, GuidTable, StringHeadValue, PcdNameOffsetTable,VariableTable,SkuTable, StringTableLen, PcdTokenTable,PcdCNameTable,
-SizeTableValue, InitValueUint16, VardefValueUint16,InitValueUint8, VardefValueUint8, InitValueBoolean,
+SizeTableValue, InitValueUint16, VardefValueUint16, InitValueUint8, VardefValueUint8, InitValueBoolean,
 VardefValueBoolean, SkuidValue, SkuIndexValue, UnInitValueUint64, UnInitValueUint32, UnInitValueUint16, UnInitValueUint8, UnInitValueBoolean]
 DbItemTotal = [DbInitValueUint64, DbVardefValueUint64, DbInitValueUint32, DbVardefValueUint32, DbVpdHeadValue, DbExMapTable,
 DbLocalTokenNumberTable, DbGuidTable, DbStringHeadValue, DbPcdNameOffsetTable,DbVariableTable,DbSkuTable, DbStringTableLen, DbPcdTokenTable, DbPcdCNameTable,
-DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16,DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
+DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16, DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
 DbVardefValueBoolean, DbSkuidValue, DbSkuIndexValue, DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean]
 # SkuidValue is the last table in the init table items
@@ -1343,7 +1343,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
 Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
 elif Sku.DefaultValue[0] == '"':
 DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
-Size = len(Sku.DefaultValue) -2 + 1
+Size = len(Sku.DefaultValue) - 2 + 1
 Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
 elif Sku.DefaultValue[0] == '{':
 DefaultValueBinStructure = StringToArray(Sku.DefaultValue)

@@ -113,7 +113,7 @@ def DecToHexStr(Dec, Digit = 8):
 # @retval: A list for formatted hex string
 #
 def DecToHexList(Dec, Digit = 8):
-Hex = eval("'%0" + str(Digit) + "X' % int(Dec)" )
+Hex = eval("'%0" + str(Digit) + "X' % int(Dec)")
 List = []
 for Bit in range(Digit - 2, -1, -2):
 List.append(HexHeader + Hex[Bit:Bit + 2])
@@ -192,7 +192,7 @@ def CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
 Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
 UnusedStr = WriteLine(UnusedStr, Line)
-Str = ''.join([Str,UnusedStr])
+Str = ''.join([Str, UnusedStr])
 Str = WriteLine(Str, '')
 if IsCompatibleMode or UniGenCFlag:
@@ -235,7 +235,7 @@ def CreateCFileHeader():
 #
 def CreateBinBuffer(BinBuffer, Array):
 for Item in Array:
-BinBuffer.write(pack("B", int(Item,16)))
+BinBuffer.write(pack("B", int(Item, 16)))
 ## Create a formatted string all items in an array
 #
@@ -450,14 +450,14 @@ def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer,
 if UniBinBuffer:
 CreateBinBuffer (UniBinBuffer, List)
 UniBinBuffer.write (StringBuffer.getvalue())
-UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END,16)))
+UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END, 16)))
 StringBuffer.close()
 #
 # Create line for string variable name
 # "unsigned char $(BaseName)Strings[] = {"
 #
-AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName + COMMON_FILE_NAME + '[] = {\n' )
+AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName + COMMON_FILE_NAME + '[] = {\n')
 if IsCompatibleMode:
 #
@@ -618,13 +618,13 @@ def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, Ski
 # Write an item
 #
 def Write(Target, Item):
-return ''.join([Target,Item])
+return ''.join([Target, Item])
 #
 # Write an item with a break line
 #
 def WriteLine(Target, Item):
-return ''.join([Target,Item,'\n'])
+return ''.join([Target, Item, '\n'])
 # This acts like the main() function for the script, unless it is 'import'ed into another
 # script.

@@ -248,7 +248,7 @@ class UniFileClassObject(object):
 EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File);
 LineNo = GetLineNo(FileIn, Line, False)
 EdkLogger.error("Unicode File Parser", PARSER_ERROR, "Wrong language definition",
-ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line, File = File, Line = LineNo)
+ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line, File=File, Line=LineNo)
 else:
 LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File)
 LangPrintName = Lang[2]
@@ -352,7 +352,7 @@ class UniFileClassObject(object):
 if Name != '':
 MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
 if MatchString == None or MatchString.end(0) != len(Name):
-EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' %(Name, self.File))
+EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' % (Name, self.File))
 LanguageList = Item.split(u'#language ')
 for IndexI in range(len(LanguageList)):
 if IndexI == 0:
@@ -512,7 +512,7 @@ class UniFileClassObject(object):
 if not self.IsCompatibleMode and Name != '':
 MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
 if MatchString == None or MatchString.end(0) != len(Name):
-EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' %(Name, self.File))
+EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' % (Name, self.File))
 self.AddStringToList(Name, Language, Value)
 continue

@@ -49,15 +49,15 @@ class PcdEntry:
 if self.PcdValue == '' :
 EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
-"Invalid PCD format(Name: %s File: %s line: %s) , no Value specified!" %(self.PcdCName, self.FileName, self.Lineno))
+"Invalid PCD format(Name: %s File: %s line: %s) , no Value specified!" % (self.PcdCName, self.FileName, self.Lineno))
 if self.PcdOffset == '' :
 EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
-"Invalid PCD format(Name: %s File: %s Line: %s) , no Offset specified!" %(self.PcdCName, self.FileName, self.Lineno))
+"Invalid PCD format(Name: %s File: %s Line: %s) , no Offset specified!" % (self.PcdCName, self.FileName, self.Lineno))
 if self.PcdSize == '' :
 EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
-"Invalid PCD format(Name: %s File: %s Line: %s), no PcdSize specified!" %(self.PcdCName, self.FileName, self.Lineno))
+"Invalid PCD format(Name: %s File: %s Line: %s), no PcdSize specified!" % (self.PcdCName, self.FileName, self.Lineno))
 self._GenOffsetValue ()
@@ -199,7 +199,7 @@ class PcdEntry:
 EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
 "PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
 try:
-self.PcdValue= pack('%ds' % Size, ValueString)
+self.PcdValue = pack('%ds' % Size, ValueString)
 except:
 EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
 "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
@@ -246,7 +246,7 @@ class PcdEntry:
 if Value > 255:
 EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
-"The value item %s in byte array %s do not in range 0 ~ 0xFF(File: %s Line: %s)" %\
+"The value item %s in byte array %s do not in range 0 ~ 0xFF(File: %s Line: %s)" % \
 (ValueList[Index], ValueString, self.FileName, self.Lineno))
 ReturnArray.append(Value)
@@ -262,10 +262,10 @@ class PcdEntry:
 #
 def _PackUnicode(self, UnicodeString, Size):
 if (Size < 0):
-EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" %\
+EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % \
 (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
 if (len(UnicodeString) < 3):
-EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD: %s ,ASCII string %s at least contains two!(File: %s Line: %s)" %\
+EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD: %s ,ASCII string %s at least contains two!(File: %s Line: %s)" % \
 (self.PcdCName, self.PcdUnpackValue, self.FileName, self.Lineno))
 UnicodeString = UnicodeString[2:-1]
@@ -322,11 +322,11 @@ class GenVPD :
 try:
 self.FileLinesList = fInputfile.readlines()
 except:
-EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE, "File read failed for %s" %InputFileName,None)
+EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE, "File read failed for %s" % InputFileName, None)
 finally:
 fInputfile.close()
 except:
-EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %InputFileName,None)
+EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % InputFileName, None)
 ##
 # Parser the input file which is generated by the build tool. Convert the value of each pcd's
@@ -362,7 +362,7 @@ class GenVPD :
 ValueList[4] = PtrValue[0]
 self.FileLinesList[count] = ValueList
 # Store the line number
-self.FileLinesList[count].append(str(count+1))
+self.FileLinesList[count].append(str(count + 1))
 elif len(line) <= 1 :
 # Set the blank line to "None"
 self.FileLinesList[count] = None
@@ -474,12 +474,12 @@ class GenVPD :
 # Sort fixed offset list in order to find out where has free spaces for the pcd's offset
 # value is "*" to insert into.
-self.PcdFixedOffsetSizeList.sort(lambda x,y: cmp(x.PcdBinOffset, y.PcdBinOffset))
+self.PcdFixedOffsetSizeList.sort(lambda x, y: cmp(x.PcdBinOffset, y.PcdBinOffset))
 #
 # Sort the un-fixed pcd's offset by it's size.
 #
-self.PcdUnknownOffsetList.sort(lambda x,y: cmp(x.PcdBinSize, y.PcdBinSize))
+self.PcdUnknownOffsetList.sort(lambda x, y: cmp(x.PcdBinSize, y.PcdBinSize))
 #
 # Process all Offset value are "*"
@@ -509,21 +509,21 @@ class GenVPD :
 # Two pcd's offset is same
 if PcdNow.PcdBinOffset == PcdNext.PcdBinOffset :
 EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
-"The offset of %s at line: %s is same with %s at line: %s in file %s" %\
+"The offset of %s at line: %s is same with %s at line: %s in file %s" % \
 (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
 None)
 # Overlapped
 if PcdNow.PcdBinOffset + PcdNow.PcdBinSize > PcdNext.PcdBinOffset :
 EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
-"The offset of %s at line: %s is overlapped with %s at line: %s in file %s" %\
+"The offset of %s at line: %s is overlapped with %s at line: %s in file %s" % \
 (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
 None)
 # Has free space, raise a warning message
 if PcdNow.PcdBinOffset + PcdNow.PcdBinSize < PcdNext.PcdBinOffset :
 EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
-"The offsets have free space of between %s at line: %s and %s at line: %s in file %s" %\
+"The offsets have free space of between %s at line: %s and %s at line: %s in file %s" % \
 (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
 None)
 count += 1
@@ -587,7 +587,7 @@ class GenVPD :
 # Usually it will not enter into this thunk, if so, means it overlapped.
 else :
 EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,
-"The offset value definition has overlapped at pcd: %s, it's offset is: %s, in file: %s line: %s" %\
+"The offset value definition has overlapped at pcd: %s, it's offset is: %s, in file: %s line: %s" % \
 (eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset, eachFixedPcd.InputFileName, eachFixedPcd.Lineno),
 None)
 FixOffsetSizeListCount += 1
@@ -618,16 +618,16 @@ class GenVPD :
 #Open an VPD file to process
 try:
-fVpdFile = open (BinFileName, "wb", 0)
+fVpdFile = open(BinFileName, "wb", 0)
 except:
 # Open failed
-EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %self.VpdFileName,None)
+EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)
 try :
-fMapFile = open (MapFileName, "w", 0)
+fMapFile = open(MapFileName, "w", 0)
 except:
 # Open failed
-EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %self.MapFileName,None)
+EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)
 # Use a instance of StringIO to cache data
 fStringIO = StringIO.StringIO('')
@@ -636,14 +636,14 @@ class GenVPD :
 try :
 fMapFile.write (st.MAP_FILE_COMMENT_TEMPLATE + "\n")
 except:
-EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.MapFileName,None)
+EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)
 for eachPcd in self.PcdFixedOffsetSizeList :
 # write map file
 try :
 fMapFile.write("%s | %s | %s | %s | %s \n" % (eachPcd.PcdCName, eachPcd.SkuId,eachPcd.PcdOffset, eachPcd.PcdSize,eachPcd.PcdUnpackValue))
 except:
-EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.MapFileName,None)
+EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)
 # Write Vpd binary file
 fStringIO.seek (eachPcd.PcdBinOffset)
@@ -656,7 +656,7 @@ class GenVPD :
 try :
 fVpdFile.write (fStringIO.getvalue())
 except:
-EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.VpdFileName,None)
+EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.VpdFileName, None)
 fStringIO.close ()
 fVpdFile.close ()

@@ -27,19 +27,19 @@ from Common.LongFilePathSupport import OpenLongFilePath as open
 #
 def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
 try:
-F = open(FileName,'r')
+F = open(FileName, 'r')
 Keys = []
 for Line in F:
 if Line.startswith(CommentCharacter):
 continue
-LineList = Line.split(KeySplitCharacter,1)
+LineList = Line.split(KeySplitCharacter, 1)
 if len(LineList) >= 2:
 Key = LineList[0].split()
 if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
 if ValueSplitFlag:
-Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter)
+Dictionary[Key[0]] = LineList[1].replace('\\', '/').split(ValueSplitCharacter)
 else:
-Dictionary[Key[0]] = LineList[1].strip().replace('\\','/')
+Dictionary[Key[0]] = LineList[1].strip().replace('\\', '/')
 Keys += [Key[0]]
 F.close()
 return 0

@@ -59,7 +59,7 @@ class EdkIIWorkspace:
 #
 # Load TianoCoreOrgLogo, used for GUI tool
 #
-self.Icon = wx.Icon(self.WorkspaceFile('tools/Python/TianoCoreOrgLogo.gif'),wx.BITMAP_TYPE_GIF)
+self.Icon = wx.Icon(self.WorkspaceFile('tools/Python/TianoCoreOrgLogo.gif'), wx.BITMAP_TYPE_GIF)
 except:
 self.Icon = None
@@ -219,19 +219,19 @@ class EdkIIWorkspace:
 #
 def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
 try:
-F = open(FileName,'r')
+F = open(FileName, 'r')
 except:
 return False
 Keys = []
 for Line in F:
-LineList = Line.split(KeySplitCharacter,1)
+LineList = Line.split(KeySplitCharacter, 1)
 if len(LineList) >= 2:
 Key = LineList[0].split()
 if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
 if ValueSplitFlag:
-Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter)
+Dictionary[Key[0]] = LineList[1].replace('\\', '/').split(ValueSplitCharacter)
 else:
-Dictionary[Key[0]] = LineList[1].strip().replace('\\','/')
+Dictionary[Key[0]] = LineList[1].strip().replace('\\', '/')
 Keys += [Key[0]]
 F.close()
 return True
@@ -252,7 +252,7 @@ def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplit
 #
 def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
 try:
-F = open(FileName,'r')
+F = open(FileName, 'r')
 Lines = []
 Lines = F.readlines()
 F.close()
@@ -265,7 +265,7 @@ def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplit
 MaxLength = len(Key)
 Index = 0
 for Line in Lines:
-LineList = Line.split(KeySplitCharacter,1)
+LineList = Line.split(KeySplitCharacter, 1)
 if len(LineList) >= 2:
 Key = LineList[0].split()
 if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] in Dictionary:
@@ -275,17 +275,17 @@ def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplit
 Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, Dictionary[Key[0]])
 Lines.pop(Index)
 if Key[0] in Keys:
-Lines.insert(Index,Line)
+Lines.insert(Index, Line)
 Keys.remove(Key[0])
 Index += 1
 for RemainingKey in Keys:
 if ValueSplitFlag:
-Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter,' '.join(Dictionary[RemainingKey]))
+Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, ' '.join(Dictionary[RemainingKey]))
 else:
 Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, Dictionary[RemainingKey])
 Lines.append(Line)
 try:
-F = open(FileName,'w')
+F = open(FileName, 'w')
 except:
 return False
 F.writelines(Lines)

@@ -69,7 +69,7 @@ class Warning (Exception):
 # @param File The FDF name
 # @param Line The Line number that error occurs
 #
-def __init__(self, Str, File = None, Line = None):
+def __init__(self, Str, File=None, Line=None):
 FileLineTuple = GetRealFileLine(File, Line)
 self.FileName = FileLineTuple[0]
@@ -360,7 +360,7 @@ class FdfParser(object):
 raise Warning("Macro not complete At Line ", self.FileName, self.CurrentLineNumber)
 return Str
-def __ReplaceFragment(self, StartPos, EndPos, Value = ' '):
+def __ReplaceFragment(self, StartPos, EndPos, Value=' '):
 if StartPos[0] == EndPos[0]:
 Offset = StartPos[1]
 while Offset <= EndPos[1]:

@@ -519,7 +519,7 @@ def GetXmlFileInfo(FileName, TagTuple):
 # @retval Options A optparse object containing the parsed options.
 # @retval InputFile Path of an source file to be migrated.
 #
-def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber = 1.0):
+def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber=1.0):
 # use clearer usage to override default usage message
 UsageString = "%s [-a] [-v|-q] [-o <output_file>] <input_file>" % ToolName
 Version = "%s Version %.2f" % (ToolName, VersionNumber)

@@ -38,7 +38,7 @@ from Common.LongFilePathSupport import OpenLongFilePath as open
 from Common.MultipleWorkspace import MultipleWorkspace as mws
 ## Regular expression used to find out place holders in string template
-gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE|re.UNICODE)
+gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE | re.UNICODE)
 ## Dictionary used to store file time stamp for quick re-access
 gFileTimeStampCache = {} # {file path : file time stamp}
@@ -293,11 +293,11 @@ def ProcessVariableArgument(Option, OptionString, Value, Parser):
 def GuidStringToGuidStructureString(Guid):
 GuidList = Guid.split('-')
 Result = '{'
-for Index in range(0,3,1):
+for Index in range(0, 3, 1):
 Result = Result + '0x' + GuidList[Index] + ', '
 Result = Result + '{0x' + GuidList[3][0:2] + ', 0x' + GuidList[3][2:4]
-for Index in range(0,12,2):
-Result = Result + ', 0x' + GuidList[4][Index:Index+2]
+for Index in range(0, 12, 2):
+Result = Result + ', 0x' + GuidList[4][Index:Index + 2]
 Result += '}}'
 return Result
@@ -494,7 +494,7 @@ def SaveFileOnChange(File, Content, IsBinaryFile=True):
 Fd.write(Content)
 Fd.close()
 except IOError, X:
-EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s'%X)
+EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
 return True
@@ -613,7 +613,7 @@ class DirCache:
 #
 # @retval A list of all files
 #
-def GetFiles(Root, SkipList=None, FullPath = True):
+def GetFiles(Root, SkipList=None, FullPath=True):
 OriPath = Root
 FileList = []
 for Root, Dirs, Files in os.walk(Root):
@@ -663,7 +663,7 @@ def RealPath2(File, Dir='', OverrideDir=''):
 if OverrideDir[-1] == os.path.sep:
 return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)]
 else:
-return NewFile[len(OverrideDir)+1:], NewFile[0:len(OverrideDir)]
+return NewFile[len(OverrideDir) + 1:], NewFile[0:len(OverrideDir)]
 if GlobalData.gAllFiles:
 NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]
 if not NewFile:
@@ -675,7 +675,7 @@ def RealPath2(File, Dir='', OverrideDir=''):
 if Dir[-1] == os.path.sep:
 return NewFile[len(Dir):], NewFile[0:len(Dir)]
 else:
-return NewFile[len(Dir)+1:], NewFile[0:len(Dir)]
+return NewFile[len(Dir) + 1:], NewFile[0:len(Dir)]
 else:
 return NewFile, ''
@@ -701,7 +701,7 @@ def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='
 # Replace the default dir to current dir
 if Dir == '.':
 Dir = os.getcwd()
-Dir = Dir[len(Workspace)+1:]
+Dir = Dir[len(Workspace) + 1:]
 # First check if File has Edk definition itself
 if File.find('$(EFI_SOURCE)') > -1 or File.find('$(EDK_SOURCE)') > -1:
@@ -740,7 +740,7 @@ def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.'
 # Dir is current module dir related to workspace
 if Dir == '.':
 Dir = os.getcwd()
-Dir = Dir[len(Workspace)+1:]
+Dir = Dir[len(Workspace) + 1:]
 NewFile = File
 RelaPath = AllFiles[os.path.normpath(Dir)]
@@ -865,7 +865,7 @@ class TemplateString(object):
 #
 # PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint
 #
-for PlaceHolder,Start,End in PlaceHolderList:
+for PlaceHolder, Start, End in PlaceHolderList:
 self._SubSectionList.append(TemplateSection[SubSectionStart:Start])
 self._SubSectionList.append(TemplateSection[Start:End])
 self._PlaceHolderList.append(PlaceHolder)
@@ -1251,11 +1251,11 @@ class tdict:
 if len(key) > 1:
 RestKeys = key[1:]
 elif self._Level_ > 1:
-RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]
+RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
 else:
 FirstKey = key
 if self._Level_ > 1:
-RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]
+RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
 if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList:
 FirstKey = self._Wildcard
@@ -1328,11 +1328,11 @@ class tdict:
 if len(key) > 1:
 RestKeys = key[1:]
 else:
-RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]
+RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
 else:
 FirstKey = key
 if self._Level_ > 1:
-RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]
+RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
 if FirstKey in self._ValidWildcardList:
 FirstKey = self._Wildcard
@@ -1662,7 +1662,7 @@ def SplitOption(OptionString):
 if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]:
 if Index > OptionStart:
-OptionList.append(OptionString[OptionStart:Index-1])
+OptionList.append(OptionString[OptionStart:Index - 1])
 OptionStart = Index
 LastChar = CurrentChar
 OptionList.append(OptionString[OptionStart:])
@@ -1739,7 +1739,7 @@ class PathClass(object):
 if self.Root[-1] == os.path.sep:
 self.File = self.Path[len(self.Root):]
 else:
-self.File = self.Path[len(self.Root)+1:]
+self.File = self.Path[len(self.Root) + 1:]
 else:
 self.Path = os.path.normpath(self.File)

@@ -42,7 +42,7 @@ gDefaultToolsDefFile = "tools_def.txt"
 # @var MacroDictionary: To store keys and values defined in DEFINE statement
 #
 class ToolDefClassObject(object):
-def __init__(self, FileName = None):
+def __init__(self, FileName=None):
 self.ToolsDefTxtDictionary = {}
 self.MacroDictionary = {}
 for Env in os.environ:
@@ -61,7 +61,7 @@ class ToolDefClassObject(object):
 FileContent = []
 if os.path.isfile(FileName):
 try:
-F = open(FileName,'r')
+F = open(FileName, 'r')
 FileContent = F.readlines()
 except:
 EdkLogger.error("tools_def.txt parser", FILE_OPEN_FAILURE, ExtraData=FileName)
@@ -155,7 +155,7 @@ class ToolDefClassObject(object):
 self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE].sort()
 KeyList = [TAB_TOD_DEFINES_TARGET, TAB_TOD_DEFINES_TOOL_CHAIN_TAG, TAB_TOD_DEFINES_TARGET_ARCH, TAB_TOD_DEFINES_COMMAND_TYPE]
-for Index in range(3,-1,-1):
+for Index in range(3, -1, -1):
 for Key in dict(self.ToolsDefTxtDictionary):
 List = Key.split('_')
 if List[Index] == '*':

@ -34,7 +34,7 @@ import Common.DataType
import Common.GlobalData as GlobalData import Common.GlobalData as GlobalData
from Common import EdkLogger from Common import EdkLogger
from Common.String import * from Common.String import *
from Common.Misc import DirCache,PathClass from Common.Misc import DirCache, PathClass
from Common.Misc import SaveFileOnChange from Common.Misc import SaveFileOnChange
from Common.Misc import ClearDuplicatedInf from Common.Misc import ClearDuplicatedInf
from Common.Misc import GuidStructureStringToGuidString from Common.Misc import GuidStructureStringToGuidString
@ -93,7 +93,7 @@ def main():
if 'EDK_SOURCE' in os.environ.keys(): if 'EDK_SOURCE' in os.environ.keys():
GenFdsGlobalVariable.EdkSourceDir = os.path.normcase(os.environ['EDK_SOURCE']) GenFdsGlobalVariable.EdkSourceDir = os.path.normcase(os.environ['EDK_SOURCE'])
if (Options.debug): if (Options.debug):
GenFdsGlobalVariable.VerboseLogger( "Using Workspace:" + Workspace) GenFdsGlobalVariable.VerboseLogger("Using Workspace:" + Workspace)
os.chdir(GenFdsGlobalVariable.WorkSpaceDir) os.chdir(GenFdsGlobalVariable.WorkSpaceDir)
# set multiple workspace # set multiple workspace
@ -106,7 +106,7 @@ def main():
if FdfFilename[0:2] == '..': if FdfFilename[0:2] == '..':
FdfFilename = os.path.realpath(FdfFilename) FdfFilename = os.path.realpath(FdfFilename)
if not os.path.isabs (FdfFilename): if not os.path.isabs(FdfFilename):
FdfFilename = mws.join(GenFdsGlobalVariable.WorkSpaceDir, FdfFilename) FdfFilename = mws.join(GenFdsGlobalVariable.WorkSpaceDir, FdfFilename)
if not os.path.exists(FdfFilename): if not os.path.exists(FdfFilename):
EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=FdfFilename) EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=FdfFilename)
@ -287,7 +287,7 @@ def main():
GenFds.DisplayFvSpaceInfo(FdfParserObj) GenFds.DisplayFvSpaceInfo(FdfParserObj)
except FdfParser.Warning, X: except FdfParser.Warning, X:
EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError = False) EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
ReturnCode = FORMAT_INVALID ReturnCode = FORMAT_INVALID
except FatalError, X: except FatalError, X:
if Options.debug != None: if Options.debug != None:
@ -326,7 +326,7 @@ def SingleCheckCallback(option, opt_str, value, parser):
# #
def myOptionParser(): def myOptionParser():
usage = "%prog [options] -f input_file -a arch_list -b build_target -p active_platform -t tool_chain_tag -D \"MacroName [= MacroValue]\"" usage = "%prog [options] -f input_file -a arch_list -b build_target -p active_platform -t tool_chain_tag -D \"MacroName [= MacroValue]\""
Parser = OptionParser(usage=usage,description=__copyright__,version="%prog " + str(versionNumber)) Parser = OptionParser(usage=usage, description=__copyright__, version="%prog " + str(versionNumber))
Parser.add_option("-f", "--file", dest="filename", type="string", help="Name of FDF file to convert", action="callback", callback=SingleCheckCallback) Parser.add_option("-f", "--file", dest="filename", type="string", help="Name of FDF file to convert", action="callback", callback=SingleCheckCallback)
Parser.add_option("-a", "--arch", dest="archList", help="comma separated list containing one or more of: IA32, X64, IPF, ARM, AARCH64 or EBC which should be built, overrides target.txt?s TARGET_ARCH") Parser.add_option("-a", "--arch", dest="archList", help="comma separated list containing one or more of: IA32, X64, IPF, ARM, AARCH64 or EBC which should be built, overrides target.txt?s TARGET_ARCH")
Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.") Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
@ -503,7 +503,7 @@ class GenFds :
if UsedSizeValue == TotalSizeValue: if UsedSizeValue == TotalSizeValue:
Percentage = '100' Percentage = '100'
else: else:
Percentage = str((UsedSizeValue+0.0)/TotalSizeValue)[0:4].lstrip('0.') Percentage = str((UsedSizeValue + 0.0) / TotalSizeValue)[0:4].lstrip('0.')
GenFdsGlobalVariable.InfLogger(Name + ' ' + '[' + Percentage + '%Full] ' + str(TotalSizeValue) + ' total, ' + str(UsedSizeValue) + ' used, ' + str(FreeSizeValue) + ' free') GenFdsGlobalVariable.InfLogger(Name + ' ' + '[' + Percentage + '%Full] ' + str(TotalSizeValue) + ' total, ' + str(UsedSizeValue) + ' used, ' + str(FreeSizeValue) + ' free')
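As a side note on the percentage arithmetic above: the float-to-string slice plus lstrip('0.') is what turns the used/total ratio into the bare number printed inside "[..%Full]". A quick standalone check with made-up sizes:

    # Quick check of the "used space" arithmetic above; the sizes are sample values only.
    UsedSizeValue = 0x30000                   # 192 KB used
    TotalSizeValue = 0x40000                  # 256 KB total
    Percentage = str((UsedSizeValue + 0.0) / TotalSizeValue)[0:4].lstrip('0.')
    print(Percentage)                         # "75", so the log line reads "[75%Full]"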


@ -274,7 +274,7 @@ class GenFdsGlobalVariable:
# @param ArchList The Arch list of platform # @param ArchList The Arch list of platform
# #
def SetDir (OutputDir, FdfParser, WorkSpace, ArchList): def SetDir (OutputDir, FdfParser, WorkSpace, ArchList):
GenFdsGlobalVariable.VerboseLogger( "GenFdsGlobalVariable.OutputDir :%s" %OutputDir) GenFdsGlobalVariable.VerboseLogger("GenFdsGlobalVariable.OutputDir :%s" % OutputDir)
# GenFdsGlobalVariable.OutputDirDict = OutputDir # GenFdsGlobalVariable.OutputDirDict = OutputDir
GenFdsGlobalVariable.FdfParser = FdfParser GenFdsGlobalVariable.FdfParser = FdfParser
GenFdsGlobalVariable.WorkSpace = WorkSpace GenFdsGlobalVariable.WorkSpace = WorkSpace
@ -292,7 +292,7 @@ class GenFdsGlobalVariable:
# Create FV Address inf file # Create FV Address inf file
# #
GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, 'FvAddress.inf') GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, 'FvAddress.inf')
FvAddressFile = open (GenFdsGlobalVariable.FvAddressFileName, 'w') FvAddressFile = open(GenFdsGlobalVariable.FvAddressFileName, 'w')
# #
# Add [Options] # Add [Options]
# #
@ -386,7 +386,7 @@ class GenFdsGlobalVariable:
CommandFile = Output + '.txt' CommandFile = Output + '.txt'
if Ui not in [None, '']: if Ui not in [None, '']:
#Cmd += ["-n", '"' + Ui + '"'] #Cmd += ["-n", '"' + Ui + '"']
SectionData = array.array('B', [0,0,0,0]) SectionData = array.array('B', [0, 0, 0, 0])
SectionData.fromstring(Ui.encode("utf_16_le")) SectionData.fromstring(Ui.encode("utf_16_le"))
SectionData.append(0) SectionData.append(0)
SectionData.append(0) SectionData.append(0)
@ -463,9 +463,9 @@ class GenFdsGlobalVariable:
Cmd += ["-r", BaseAddress] Cmd += ["-r", BaseAddress]
if ForceRebase == False: if ForceRebase == False:
Cmd +=["-F", "FALSE"] Cmd += ["-F", "FALSE"]
elif ForceRebase == True: elif ForceRebase == True:
Cmd +=["-F", "TRUE"] Cmd += ["-F", "TRUE"]
if Capsule: if Capsule:
Cmd += ["-c"] Cmd += ["-c"]
@ -606,7 +606,7 @@ class GenFdsGlobalVariable:
sys.stdout.write('\n') sys.stdout.write('\n')
try: try:
PopenObject = subprocess.Popen(' '.join(cmd), stdout=subprocess.PIPE, stderr= subprocess.PIPE, shell=True) PopenObject = subprocess.Popen(' '.join(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
except Exception, X: except Exception, X:
EdkLogger.error("GenFds", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0])) EdkLogger.error("GenFds", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0]))
(out, error) = PopenObject.communicate() (out, error) = PopenObject.communicate()
@ -618,7 +618,7 @@ class GenFdsGlobalVariable:
returnValue[0] = PopenObject.returncode returnValue[0] = PopenObject.returncode
return return
if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1: if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1:
GenFdsGlobalVariable.InfLogger ("Return Value = %d" %PopenObject.returncode) GenFdsGlobalVariable.InfLogger ("Return Value = %d" % PopenObject.returncode)
GenFdsGlobalVariable.InfLogger (out) GenFdsGlobalVariable.InfLogger (out)
GenFdsGlobalVariable.InfLogger (error) GenFdsGlobalVariable.InfLogger (error)
if PopenObject.returncode != 0: if PopenObject.returncode != 0:
@ -631,7 +631,7 @@ class GenFdsGlobalVariable:
def InfLogger (msg): def InfLogger (msg):
EdkLogger.info(msg) EdkLogger.info(msg)
def ErrorLogger (msg, File = None, Line = None, ExtraData = None): def ErrorLogger (msg, File=None, Line=None, ExtraData=None):
EdkLogger.error('GenFds', GENFDS_ERROR, msg, File, Line, ExtraData) EdkLogger.error('GenFds', GENFDS_ERROR, msg, File, Line, ExtraData)
def DebugLogger (Level, msg): def DebugLogger (Level, msg):
@ -642,7 +642,7 @@ class GenFdsGlobalVariable:
# @param Str String that may contain macro # @param Str String that may contain macro
# @param MacroDict Dictionary that contains macro value pair # @param MacroDict Dictionary that contains macro value pair
# #
def MacroExtend (Str, MacroDict = {}, Arch = 'COMMON'): def MacroExtend (Str, MacroDict={}, Arch='COMMON'):
if Str == None : if Str == None :
return None return None
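For readers who have not met this helper before, MacroExtend substitutes macro references in Str using MacroDict (plus, presumably, workspace-level defaults that sit outside this hunk). Below is a minimal illustrative sketch of that kind of $(NAME) substitution with a made-up dictionary; it is not the real implementation:

    # Illustrative sketch only, not GenFdsGlobalVariable.MacroExtend itself.
    def ExpandMacrosSketch(Str, MacroDict=None):
        if Str is None:
            return None
        for Name, Value in (MacroDict or {}).items():
            Str = Str.replace('$(%s)' % Name, Value)
        return Str

    print(ExpandMacrosSketch('$(OUTPUT)/FV/FVMAIN.Fv', {'OUTPUT': 'Build/Sample'}))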


@ -53,7 +53,7 @@ class GuidSection(GuidSectionClassObject) :
# @param Dict dictionary contains macro and its value # @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name, section alignment) # @retval tuple (Generated file name, section alignment)
# #
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}): def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict={}):
# #
# Generate all section # Generate all section
# #
@ -84,7 +84,7 @@ class GuidSection(GuidSectionClassObject) :
for Sect in self.SectionList: for Sect in self.SectionList:
Index = Index + 1 Index = Index + 1
SecIndex = '%s.%d' %(SecNum,Index) SecIndex = '%s.%d' % (SecNum, Index)
# set base address for inside FvImage # set base address for inside FvImage
if isinstance(Sect, FvImageSection): if isinstance(Sect, FvImageSection):
if self.FvAddr != []: if self.FvAddr != []:
@ -93,7 +93,7 @@ class GuidSection(GuidSectionClassObject) :
elif isinstance(Sect, GuidSection): elif isinstance(Sect, GuidSection):
Sect.FvAddr = self.FvAddr Sect.FvAddr = self.FvAddr
Sect.FvParentAddr = self.FvParentAddr Sect.FvParentAddr = self.FvParentAddr
ReturnSectList, align = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList,FfsInf, Dict) ReturnSectList, align = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList, FfsInf, Dict)
if isinstance(Sect, GuidSection): if isinstance(Sect, GuidSection):
if Sect.IncludeFvSection: if Sect.IncludeFvSection:
self.IncludeFvSection = Sect.IncludeFvSection self.IncludeFvSection = Sect.IncludeFvSection
@ -135,7 +135,7 @@ class GuidSection(GuidSectionClassObject) :
# GENCRC32 section # GENCRC32 section
# #
if self.NameGuid == None : if self.NameGuid == None :
GenFdsGlobalVariable.VerboseLogger( "Use GenSection function Generate CRC32 Section") GenFdsGlobalVariable.VerboseLogger("Use GenSection function Generate CRC32 Section")
GenFdsGlobalVariable.GenerateSection(OutputFile, SectFile, Section.Section.SectionType[self.SectionType], InputAlign=SectAlign) GenFdsGlobalVariable.GenerateSection(OutputFile, SectFile, Section.Section.SectionType[self.SectionType], InputAlign=SectAlign)
OutputFileList = [] OutputFileList = []
OutputFileList.append(OutputFile) OutputFileList.append(OutputFile)
@ -144,7 +144,7 @@ class GuidSection(GuidSectionClassObject) :
elif ExternalTool == None: elif ExternalTool == None:
EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % self.NameGuid) EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % self.NameGuid)
else: else:
DummyFile = OutputFile+".dummy" DummyFile = OutputFile + ".dummy"
# #
# Call GenSection with DUMMY section type. # Call GenSection with DUMMY section type.
# #
@ -197,12 +197,12 @@ class GuidSection(GuidSectionClassObject) :
if not os.path.exists(TempFile): if not os.path.exists(TempFile):
EdkLogger.error("GenFds", COMMAND_FAILURE, 'Fail to call %s, no output file was generated' % ExternalTool) EdkLogger.error("GenFds", COMMAND_FAILURE, 'Fail to call %s, no output file was generated' % ExternalTool)
FileHandleIn = open(DummyFile,'rb') FileHandleIn = open(DummyFile, 'rb')
FileHandleIn.seek(0,2) FileHandleIn.seek(0, 2)
InputFileSize = FileHandleIn.tell() InputFileSize = FileHandleIn.tell()
FileHandleOut = open(TempFile,'rb') FileHandleOut = open(TempFile, 'rb')
FileHandleOut.seek(0,2) FileHandleOut.seek(0, 2)
TempFileSize = FileHandleOut.tell() TempFileSize = FileHandleOut.tell()
Attribute = [] Attribute = []
@ -263,7 +263,7 @@ class GuidSection(GuidSectionClassObject) :
ToolDb = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.ConfDir).ToolsDefTxtDatabase ToolDb = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.ConfDir).ToolsDefTxtDatabase
if ToolChain not in ToolDb['TOOL_CHAIN_TAG']: if ToolChain not in ToolDb['TOOL_CHAIN_TAG']:
EdkLogger.error("GenFds", GENFDS_ERROR, "Can not find external tool because tool tag %s is not defined in tools_def.txt!" % ToolChain) EdkLogger.error("GenFds", GENFDS_ERROR, "Can not find external tool because tool tag %s is not defined in tools_def.txt!" % ToolChain)
self.KeyStringList = [Target+'_'+ToolChain+'_'+self.CurrentArchList[0]] self.KeyStringList = [Target + '_' + ToolChain + '_' + self.CurrentArchList[0]]
for Arch in self.CurrentArchList: for Arch in self.CurrentArchList:
if Target + '_' + ToolChain + '_' + Arch not in self.KeyStringList: if Target + '_' + ToolChain + '_' + Arch not in self.KeyStringList:
self.KeyStringList.append(Target + '_' + ToolChain + '_' + Arch) self.KeyStringList.append(Target + '_' + ToolChain + '_' + Arch)
@ -281,13 +281,13 @@ class GuidSection(GuidSectionClassObject) :
KeyList[2] KeyList[2]
if Key in self.KeyStringList and KeyList[4] == 'GUID': if Key in self.KeyStringList and KeyList[4] == 'GUID':
ToolPath = ToolDefinition.get( Key + \ ToolPath = ToolDefinition.get(Key + \
'_' + \ '_' + \
KeyList[3] + \ KeyList[3] + \
'_' + \ '_' + \
'PATH') 'PATH')
ToolOption = ToolDefinition.get( Key + \ ToolOption = ToolDefinition.get(Key + \
'_' + \ '_' + \
KeyList[3] + \ KeyList[3] + \
'_' + \ '_' + \


@ -54,10 +54,10 @@ class Region(RegionClassObject):
# @retval string Generated FV file path # @retval string Generated FV file path
# #
def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, ImageBinDict, vtfDict = None, MacroDict = {}): def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, ImageBinDict, vtfDict=None, MacroDict={}):
Size = self.Size Size = self.Size
GenFdsGlobalVariable.InfLogger('\nGenerate Region at Offset 0x%X' % self.Offset) GenFdsGlobalVariable.InfLogger('\nGenerate Region at Offset 0x%X' % self.Offset)
GenFdsGlobalVariable.InfLogger(" Region Size = 0x%X" %Size) GenFdsGlobalVariable.InfLogger(" Region Size = 0x%X" % Size)
GenFdsGlobalVariable.SharpCounter = 0 GenFdsGlobalVariable.SharpCounter = 0
if self.RegionType == 'FV': if self.RegionType == 'FV':
@ -65,13 +65,13 @@ class Region(RegionClassObject):
# Get Fv from FvDict # Get Fv from FvDict
# #
self.FvAddress = int(BaseAddress, 16) + self.Offset self.FvAddress = int(BaseAddress, 16) + self.Offset
FvBaseAddress = '0x%X' %self.FvAddress FvBaseAddress = '0x%X' % self.FvAddress
FvOffset = 0 FvOffset = 0
for RegionData in self.RegionDataList: for RegionData in self.RegionDataList:
FileName = None FileName = None
if RegionData.endswith(".fv"): if RegionData.endswith(".fv"):
RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict) RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
GenFdsGlobalVariable.InfLogger(' Region FV File Name = .fv : %s'%RegionData) GenFdsGlobalVariable.InfLogger(' Region FV File Name = .fv : %s' % RegionData)
if RegionData[1] != ':' : if RegionData[1] != ':' :
RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData) RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)
if not os.path.exists(RegionData): if not os.path.exists(RegionData):
@ -101,7 +101,7 @@ class Region(RegionClassObject):
EdkLogger.error("GenFds", GENFDS_ERROR, EdkLogger.error("GenFds", GENFDS_ERROR,
"FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment)) "FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
FvBuffer = StringIO.StringIO('') FvBuffer = StringIO.StringIO('')
FvBaseAddress = '0x%X' %self.FvAddress FvBaseAddress = '0x%X' % self.FvAddress
BlockSize = None BlockSize = None
BlockNum = None BlockNum = None
FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, vtfDict) FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, vtfDict)
@ -128,7 +128,7 @@ class Region(RegionClassObject):
EdkLogger.error("GenFds", GENFDS_ERROR, EdkLogger.error("GenFds", GENFDS_ERROR,
"Size of FV File (%s) is larger than Region Size 0x%X specified." \ "Size of FV File (%s) is larger than Region Size 0x%X specified." \
% (RegionData, Size)) % (RegionData, Size))
BinFile = open (FileName, 'r+b') BinFile = open(FileName, 'r+b')
Buffer.write(BinFile.read()) Buffer.write(BinFile.read())
BinFile.close() BinFile.close()
Size = Size - FileLength Size = Size - FileLength
@ -150,7 +150,7 @@ class Region(RegionClassObject):
for RegionData in self.RegionDataList: for RegionData in self.RegionDataList:
if RegionData.endswith(".cap"): if RegionData.endswith(".cap"):
RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict) RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
GenFdsGlobalVariable.InfLogger(' Region CAPSULE Image Name = .cap : %s'%RegionData) GenFdsGlobalVariable.InfLogger(' Region CAPSULE Image Name = .cap : %s' % RegionData)
if RegionData[1] != ':' : if RegionData[1] != ':' :
RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData) RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)
if not os.path.exists(RegionData): if not os.path.exists(RegionData):
@ -187,7 +187,7 @@ class Region(RegionClassObject):
EdkLogger.error("GenFds", GENFDS_ERROR, EdkLogger.error("GenFds", GENFDS_ERROR,
"Size 0x%X of Capsule File (%s) is larger than Region Size 0x%X specified." \ "Size 0x%X of Capsule File (%s) is larger than Region Size 0x%X specified." \
% (FileLength, RegionData, Size)) % (FileLength, RegionData, Size))
BinFile = open (FileName, 'r+b') BinFile = open(FileName, 'r+b')
Buffer.write(BinFile.read()) Buffer.write(BinFile.read())
BinFile.close() BinFile.close()
Size = Size - FileLength Size = Size - FileLength
@ -217,8 +217,8 @@ class Region(RegionClassObject):
EdkLogger.error("GenFds", GENFDS_ERROR, EdkLogger.error("GenFds", GENFDS_ERROR,
"Size of File (%s) is larger than Region Size 0x%X specified." \ "Size of File (%s) is larger than Region Size 0x%X specified." \
% (RegionData, Size)) % (RegionData, Size))
GenFdsGlobalVariable.InfLogger(' Region File Name = %s'%RegionData) GenFdsGlobalVariable.InfLogger(' Region File Name = %s' % RegionData)
BinFile = open (RegionData, 'rb') BinFile = open(RegionData, 'rb')
Buffer.write(BinFile.read()) Buffer.write(BinFile.read())
BinFile.close() BinFile.close()
Size = Size - FileLength Size = Size - FileLength
@ -273,15 +273,15 @@ class Region(RegionClassObject):
Granu = 1024 Granu = 1024
Str = Str[:-1] Str = Str[:-1]
elif Str.endswith('M'): elif Str.endswith('M'):
Granu = 1024*1024 Granu = 1024 * 1024
Str = Str[:-1] Str = Str[:-1]
elif Str.endswith('G'): elif Str.endswith('G'):
Granu = 1024*1024*1024 Granu = 1024 * 1024 * 1024
Str = Str[:-1] Str = Str[:-1]
else: else:
pass pass
AlignValue = int(Str)*Granu AlignValue = int(Str) * Granu
return AlignValue return AlignValue
## BlockSizeOfRegion() ## BlockSizeOfRegion()
@ -304,7 +304,7 @@ class Region(RegionClassObject):
else: else:
# region ended within current blocks # region ended within current blocks
if self.Offset + self.Size <= End: if self.Offset + self.Size <= End:
ExpectedList.append((BlockSize, (RemindingSize + BlockSize - 1)/BlockSize)) ExpectedList.append((BlockSize, (RemindingSize + BlockSize - 1) / BlockSize))
break break
# region not ended yet # region not ended yet
else: else:
@ -313,7 +313,7 @@ class Region(RegionClassObject):
UsedBlockNum = BlockNum UsedBlockNum = BlockNum
# region started in middle of current blocks # region started in middle of current blocks
else: else:
UsedBlockNum = (End - self.Offset)/BlockSize UsedBlockNum = (End - self.Offset) / BlockSize
Start = End Start = End
ExpectedList.append((BlockSize, UsedBlockNum)) ExpectedList.append((BlockSize, UsedBlockNum))
RemindingSize -= BlockSize * UsedBlockNum RemindingSize -= BlockSize * UsedBlockNum
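The block-count expressions above are ceiling divisions over the region's block size; a standalone check with made-up numbers (the original relies on Python 2 integer '/', written '//' here so the sketch also runs on Python 3):

    # How many whole erase blocks a remaining region size occupies; sample values only.
    BlockSize = 0x10000                       # 64 KB blocks
    RemindingSize = 0x28001                   # just over 2.5 blocks still to cover
    print((RemindingSize + BlockSize - 1) // BlockSize)   # 3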
@ -333,22 +333,22 @@ class Region(RegionClassObject):
Sum += Item[0] * Item[1] Sum += Item[0] * Item[1]
if self.Size < Sum: if self.Size < Sum:
EdkLogger.error("GenFds", GENFDS_ERROR, "Total Size of FV %s 0x%x is larger than Region Size 0x%x " EdkLogger.error("GenFds", GENFDS_ERROR, "Total Size of FV %s 0x%x is larger than Region Size 0x%x "
%(FvObj.UiFvName, Sum, self.Size)) % (FvObj.UiFvName, Sum, self.Size))
# check whether the BlockStatements in FV section is appropriate # check whether the BlockStatements in FV section is appropriate
ExpectedListData = '' ExpectedListData = ''
for Item in ExpectedList: for Item in ExpectedList:
ExpectedListData += "BlockSize = 0x%x\n\tNumBlocks = 0x%x\n\t"%Item ExpectedListData += "BlockSize = 0x%x\n\tNumBlocks = 0x%x\n\t" % Item
Index = 0 Index = 0
for Item in FvObj.BlockSizeList: for Item in FvObj.BlockSizeList:
if Item[0] != ExpectedList[Index][0]: if Item[0] != ExpectedList[Index][0]:
EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not align with FD's, suggested FV BlockStatement" EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not align with FD's, suggested FV BlockStatement"
%FvObj.UiFvName, ExtraData = ExpectedListData) % FvObj.UiFvName, ExtraData=ExpectedListData)
elif Item[1] != ExpectedList[Index][1]: elif Item[1] != ExpectedList[Index][1]:
if (Item[1] < ExpectedList[Index][1]) and (Index == len(FvObj.BlockSizeList) - 1): if (Item[1] < ExpectedList[Index][1]) and (Index == len(FvObj.BlockSizeList) - 1):
break; break;
else: else:
EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not align with FD's, suggested FV BlockStatement" EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not align with FD's, suggested FV BlockStatement"
%FvObj.UiFvName, ExtraData = ExpectedListData) % FvObj.UiFvName, ExtraData=ExpectedListData)
else: else:
Index += 1 Index += 1


@ -48,7 +48,7 @@ class UiSection (UiSectionClassObject):
# @param Dict dictionary contains macro and its value # @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name, section alignment) # @retval tuple (Generated file name, section alignment)
# #
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}): def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict={}):
# #
# Prepare the parameter of GenSection # Prepare the parameter of GenSection
# #


@ -48,7 +48,7 @@ class VerSection (VerSectionClassObject):
# @param Dict dictionary contains macro and its value # @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name, section alignment) # @retval tuple (Generated file name, section alignment)
# #
def GenSection(self,OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}): def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict={}):
# #
# Prepare the parameter of GenSection # Prepare the parameter of GenSection
# #


@ -67,14 +67,14 @@ class Vtf (VtfClassObject):
def GenBsfInf (self): def GenBsfInf (self):
FvList = self.GetFvList() FvList = self.GetFvList()
self.BsfInfName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.inf') self.BsfInfName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.inf')
BsfInf = open (self.BsfInfName, 'w+') BsfInf = open(self.BsfInfName, 'w+')
if self.ResetBin != None: if self.ResetBin != None:
BsfInf.writelines ("[OPTIONS]" + T_CHAR_LF) BsfInf.writelines ("[OPTIONS]" + T_CHAR_LF)
BsfInf.writelines ("IA32_RST_BIN" + \ BsfInf.writelines ("IA32_RST_BIN" + \
" = " + \ " = " + \
GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.ResetBin)) + \ GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.ResetBin)) + \
T_CHAR_LF ) T_CHAR_LF)
BsfInf.writelines (T_CHAR_LF ) BsfInf.writelines (T_CHAR_LF)
BsfInf.writelines ("[COMPONENTS]" + T_CHAR_LF) BsfInf.writelines ("[COMPONENTS]" + T_CHAR_LF)
@ -82,43 +82,43 @@ class Vtf (VtfClassObject):
BsfInf.writelines ("COMP_NAME" + \ BsfInf.writelines ("COMP_NAME" + \
" = " + \ " = " + \
ComponentObj.CompName + \ ComponentObj.CompName + \
T_CHAR_LF ) T_CHAR_LF)
if ComponentObj.CompLoc.upper() == 'NONE': if ComponentObj.CompLoc.upper() == 'NONE':
BsfInf.writelines ("COMP_LOC" + \ BsfInf.writelines ("COMP_LOC" + \
" = " + \ " = " + \
'N' + \ 'N' + \
T_CHAR_LF ) T_CHAR_LF)
elif ComponentObj.FilePos != None: elif ComponentObj.FilePos != None:
BsfInf.writelines ("COMP_LOC" + \ BsfInf.writelines ("COMP_LOC" + \
" = " + \ " = " + \
ComponentObj.FilePos + \ ComponentObj.FilePos + \
T_CHAR_LF ) T_CHAR_LF)
else: else:
Index = FvList.index(ComponentObj.CompLoc.upper()) Index = FvList.index(ComponentObj.CompLoc.upper())
if Index == 0: if Index == 0:
BsfInf.writelines ("COMP_LOC" + \ BsfInf.writelines ("COMP_LOC" + \
" = " + \ " = " + \
'F' + \ 'F' + \
T_CHAR_LF ) T_CHAR_LF)
elif Index == 1: elif Index == 1:
BsfInf.writelines ("COMP_LOC" + \ BsfInf.writelines ("COMP_LOC" + \
" = " + \ " = " + \
'S' + \ 'S' + \
T_CHAR_LF ) T_CHAR_LF)
BsfInf.writelines ("COMP_TYPE" + \ BsfInf.writelines ("COMP_TYPE" + \
" = " + \ " = " + \
ComponentObj.CompType + \ ComponentObj.CompType + \
T_CHAR_LF ) T_CHAR_LF)
BsfInf.writelines ("COMP_VER" + \ BsfInf.writelines ("COMP_VER" + \
" = " + \ " = " + \
ComponentObj.CompVer + \ ComponentObj.CompVer + \
T_CHAR_LF ) T_CHAR_LF)
BsfInf.writelines ("COMP_CS" + \ BsfInf.writelines ("COMP_CS" + \
" = " + \ " = " + \
ComponentObj.CompCs + \ ComponentObj.CompCs + \
T_CHAR_LF ) T_CHAR_LF)
BinPath = ComponentObj.CompBin BinPath = ComponentObj.CompBin
if BinPath != '-': if BinPath != '-':
@ -126,7 +126,7 @@ class Vtf (VtfClassObject):
BsfInf.writelines ("COMP_BIN" + \ BsfInf.writelines ("COMP_BIN" + \
" = " + \ " = " + \
BinPath + \ BinPath + \
T_CHAR_LF ) T_CHAR_LF)
SymPath = ComponentObj.CompSym SymPath = ComponentObj.CompSym
if SymPath != '-': if SymPath != '-':
@ -134,12 +134,12 @@ class Vtf (VtfClassObject):
BsfInf.writelines ("COMP_SYM" + \ BsfInf.writelines ("COMP_SYM" + \
" = " + \ " = " + \
SymPath + \ SymPath + \
T_CHAR_LF ) T_CHAR_LF)
BsfInf.writelines ("COMP_SIZE" + \ BsfInf.writelines ("COMP_SIZE" + \
" = " + \ " = " + \
ComponentObj.CompSize + \ ComponentObj.CompSize + \
T_CHAR_LF ) T_CHAR_LF)
BsfInf.writelines (T_CHAR_LF ) BsfInf.writelines (T_CHAR_LF)
BsfInf.close() BsfInf.close()
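For orientation, the writelines calls in GenBsfInf above emit a plain "KEY = value" description per VTF component. A made-up fragment of the generated <UiName>.inf might look roughly like this; every path and value below is a placeholder, not real build output:

    [OPTIONS]
    IA32_RST_BIN = Build/Sample/ResetVector.bin

    [COMPONENTS]
    COMP_NAME = SampleSecCore
    COMP_LOC = F
    COMP_TYPE = 0xF
    COMP_VER = -
    COMP_CS = 1
    COMP_BIN = Build/Sample/SecCore.bin
    COMP_SYM = -
    COMP_SIZE = -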
@ -170,7 +170,7 @@ class Vtf (VtfClassObject):
(BaseAddress, Size) = FdAddressDict.get(i) (BaseAddress, Size) = FdAddressDict.get(i)
CmdStr += ( CmdStr += (
'-r', '0x%x' % BaseAddress, '-r', '0x%x' % BaseAddress,
'-s', '0x%x' %Size, '-s', '0x%x' % Size,
) )
return CmdStr return CmdStr


@ -48,7 +48,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
# #
# Length of Binary File # Length of Binary File
# #
FileHandle = open (FileName, 'rb') FileHandle = open(FileName, 'rb')
FileHandle.seek (0, 2) FileHandle.seek (0, 2)
FileLength = FileHandle.tell() FileLength = FileHandle.tell()
FileHandle.close() FileHandle.close()
@ -75,7 +75,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
return OPTION_MISSING, "PcdMaxSize is not specified for VOID* type PCD." return OPTION_MISSING, "PcdMaxSize is not specified for VOID* type PCD."
ValueLength = int(MaxSize) ValueLength = int(MaxSize)
else: else:
return PARAMETER_INVALID, "PCD type %s is not valid." %(CommandOptions.PcdTypeName) return PARAMETER_INVALID, "PCD type %s is not valid." % (CommandOptions.PcdTypeName)
# #
# Check PcdValue is in the input binary file. # Check PcdValue is in the input binary file.
# #
@ -84,7 +84,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
# #
# Read binary file into array # Read binary file into array
# #
FileHandle = open (FileName, 'rb') FileHandle = open(FileName, 'rb')
ByteArray = array.array('B') ByteArray = array.array('B')
ByteArray.fromfile(FileHandle, FileLength) ByteArray.fromfile(FileHandle, FileLength)
FileHandle.close() FileHandle.close()
@ -117,7 +117,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
if ValueNumber != 0: if ValueNumber != 0:
ValueNumber = 1 ValueNumber = 1
except: except:
return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." %(ValueString) return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (ValueString)
# #
# Set PCD value into binary data # Set PCD value into binary data
# #
@ -132,7 +132,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
else: else:
ValueNumber = int (ValueString) ValueNumber = int (ValueString)
except: except:
return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." %(ValueString) return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (ValueString)
# #
# Set PCD value into binary data # Set PCD value into binary data
# #
@ -174,7 +174,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
if Index >= ValueLength: if Index >= ValueLength:
break break
except: except:
return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string array." %(ValueString) return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string array." % (ValueString)
else: else:
# #
# Patch ascii string # Patch ascii string
@ -197,10 +197,10 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
if ByteList != OrigByteList: if ByteList != OrigByteList:
ByteArray = array.array('B') ByteArray = array.array('B')
ByteArray.fromlist(ByteList) ByteArray.fromlist(ByteList)
FileHandle = open (FileName, 'wb') FileHandle = open(FileName, 'wb')
ByteArray.tofile(FileHandle) ByteArray.tofile(FileHandle)
FileHandle.close() FileHandle.close()
return 0, "Patch Value into File %s successfully." %(FileName) return 0, "Patch Value into File %s successfully." % (FileName)
## Parse command line options ## Parse command line options
# #
@ -270,7 +270,7 @@ def Main():
EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdOffset or PcdValue of PcdTypeName is not specified.") EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdOffset or PcdValue of PcdTypeName is not specified.")
return 1 return 1
if CommandOptions.PcdTypeName.upper() not in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64", "VOID*"]: if CommandOptions.PcdTypeName.upper() not in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64", "VOID*"]:
EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData="PCD type %s is not valid." %(CommandOptions.PcdTypeName)) EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData="PCD type %s is not valid." % (CommandOptions.PcdTypeName))
return 1 return 1
if CommandOptions.PcdTypeName.upper() == "VOID*" and CommandOptions.PcdMaxSize == None: if CommandOptions.PcdTypeName.upper() == "VOID*" and CommandOptions.PcdMaxSize == None:
EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdMaxSize is not specified for VOID* type PCD.") EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdMaxSize is not specified for VOID* type PCD.")


@ -68,7 +68,7 @@ class TableReport(Table):
# @param Enabled: If this error enabled # @param Enabled: If this error enabled
# @param Corrected: if this error corrected # @param Corrected: if this error corrected
# #
def Insert(self, ErrorID, OtherMsg = '', BelongsToTable = '', BelongsToItem = -1, Enabled = 0, Corrected = -1): def Insert(self, ErrorID, OtherMsg='', BelongsToTable='', BelongsToItem= -1, Enabled=0, Corrected= -1):
self.ID = self.ID + 1 self.ID = self.ID + 1
SqlCommand = """insert into %s values(%s, %s, '%s', '%s', %s, %s, %s)""" \ SqlCommand = """insert into %s values(%s, %s, '%s', '%s', %s, %s, %s)""" \
% (self.Table, self.ID, ErrorID, ConvertToSqlString2(OtherMsg), BelongsToTable, BelongsToItem, Enabled, Corrected) % (self.Table, self.ID, ErrorID, ConvertToSqlString2(OtherMsg), BelongsToTable, BelongsToItem, Enabled, Corrected)
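For reference, Insert simply interpolates its arguments into a literal INSERT statement; a sample rendering with made-up values (the ConvertToSqlString2 escaping step is skipped here):

    # Sample rendering of the SqlCommand template above; the values are made up.
    SqlCommand = """insert into %s values(%s, %s, '%s', '%s', %s, %s, %s)""" \
                 % ('Report', 1, 10001, 'Some style warning', 'File', 25, 1, -1)
    print(SqlCommand)   # insert into Report values(1, 10001, 'Some style warning', 'File', 25, 1, -1)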
@ -98,7 +98,7 @@ class TableReport(Table):
# #
# @param Filename: To filename to save the report content # @param Filename: To filename to save the report content
# #
def ToCSV(self, Filename = 'Report.csv'): def ToCSV(self, Filename='Report.csv'):
try: try:
File = open(Filename, 'w+') File = open(Filename, 'w+')
File.write("""No, Error Code, Error Message, File, LineNo, Other Error Message\n""") File.write("""No, Error Code, Error Message, File, LineNo, Other Error Message\n""")


@ -1525,7 +1525,7 @@ class DscParser(MetaFileParser):
ValList, Valid, Index = AnalyzeDscPcd(self._ValueList[2], self._ItemType) ValList, Valid, Index = AnalyzeDscPcd(self._ValueList[2], self._ItemType)
if not Valid: if not Valid:
EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self._FileWithError, Line=self._LineIndex+1, EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self._FileWithError, Line=self._LineIndex + 1,
ExtraData="%s.%s|%s" % (self._ValueList[0], self._ValueList[1], self._ValueList[2])) ExtraData="%s.%s|%s" % (self._ValueList[0], self._ValueList[1], self._ValueList[2]))
PcdValue = ValList[Index] PcdValue = ValList[Index]
if PcdValue: if PcdValue:


@ -1863,7 +1863,7 @@ class InfBuildData(ModuleBuildClassObject):
LineNo = Record[6] LineNo = Record[6]
break break
EdkLogger.error("build", FORMAT_NOT_SUPPORTED, EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
"MODULE_TYPE %s is not supported for EDK II, valid values are:\n %s" % (self._ModuleType,' '.join(l for l in SUP_MODULE_LIST)), "MODULE_TYPE %s is not supported for EDK II, valid values are:\n %s" % (self._ModuleType, ' '.join(l for l in SUP_MODULE_LIST)),
File=self.MetaFile, Line=LineNo) File=self.MetaFile, Line=LineNo)
if (self._Specification == None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x0001000A): if (self._Specification == None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x0001000A):
if self._ModuleType == SUP_MODULE_SMM_CORE: if self._ModuleType == SUP_MODULE_SMM_CORE:
@ -1901,7 +1901,7 @@ class InfBuildData(ModuleBuildClassObject):
Macros["EDK_SOURCE"] = GlobalData.gEcpSource Macros["EDK_SOURCE"] = GlobalData.gEcpSource
Macros['PROCESSOR'] = self._Arch Macros['PROCESSOR'] = self._Arch
RecordList = self._RawData[MODEL_META_DATA_NMAKE, self._Arch, self._Platform] RecordList = self._RawData[MODEL_META_DATA_NMAKE, self._Arch, self._Platform]
for Name,Value,Dummy,Arch,Platform,ID,LineNo in RecordList: for Name, Value, Dummy, Arch, Platform, ID, LineNo in RecordList:
Value = ReplaceMacro(Value, Macros, True) Value = ReplaceMacro(Value, Macros, True)
if Name == "IMAGE_ENTRY_POINT": if Name == "IMAGE_ENTRY_POINT":
if self._ModuleEntryPointList == None: if self._ModuleEntryPointList == None:
@ -2584,7 +2584,7 @@ class InfBuildData(ModuleBuildClassObject):
'build', 'build',
FORMAT_INVALID, FORMAT_INVALID,
"No TokenValue for PCD [%s.%s] in [%s]!" % (TokenSpaceGuid, PcdCName, str(Package)), "No TokenValue for PCD [%s.%s] in [%s]!" % (TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo, File=self.MetaFile, Line=LineNo,
ExtraData=None ExtraData=None
) )
# #
@ -2597,7 +2597,7 @@ class InfBuildData(ModuleBuildClassObject):
'build', 'build',
FORMAT_INVALID, FORMAT_INVALID,
"The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid:" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)), "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid:" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo, File=self.MetaFile, Line=LineNo,
ExtraData=None ExtraData=None
) )
@ -2611,16 +2611,16 @@ class InfBuildData(ModuleBuildClassObject):
EdkLogger.error( EdkLogger.error(
'build', 'build',
FORMAT_INVALID, FORMAT_INVALID,
"The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, as a decimal it should between: 0 - 4294967295!"% (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)), "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, as a decimal it should between: 0 - 4294967295!" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo, File=self.MetaFile, Line=LineNo,
ExtraData=None ExtraData=None
) )
except: except:
EdkLogger.error( EdkLogger.error(
'build', 'build',
FORMAT_INVALID, FORMAT_INVALID,
"The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, it should be hexadecimal or decimal!"% (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)), "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, it should be hexadecimal or decimal!" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo, File=self.MetaFile, Line=LineNo,
ExtraData=None ExtraData=None
) )
@ -2635,7 +2635,7 @@ class InfBuildData(ModuleBuildClassObject):
'build', 'build',
FORMAT_INVALID, FORMAT_INVALID,
"PCD [%s.%s] in [%s] is not found in dependent packages:" % (TokenSpaceGuid, PcdCName, self.MetaFile), "PCD [%s.%s] in [%s] is not found in dependent packages:" % (TokenSpaceGuid, PcdCName, self.MetaFile),
File =self.MetaFile, Line=LineNo, File=self.MetaFile, Line=LineNo,
ExtraData="\t%s" % '\n\t'.join([str(P) for P in self.Packages]) ExtraData="\t%s" % '\n\t'.join([str(P) for P in self.Packages])
) )
Pcds[PcdCName, TokenSpaceGuid] = Pcd Pcds[PcdCName, TokenSpaceGuid] = Pcd
@ -2946,7 +2946,7 @@ determine whether database file is out of date!\n")
## Summarize all packages in the database ## Summarize all packages in the database
def GetPackageList(self, Platform, Arch, TargetName, ToolChainTag): def GetPackageList(self, Platform, Arch, TargetName, ToolChainTag):
self.Platform = Platform self.Platform = Platform
PackageList =[] PackageList = []
Pa = self.BuildObject[self.Platform, 'COMMON'] Pa = self.BuildObject[self.Platform, 'COMMON']
# #
# Get Package related to Modules # Get Package related to Modules


@ -78,13 +78,13 @@ gLineMaxLength = 120
gEndOfLine = "\r\n" gEndOfLine = "\r\n"
## Tags for section start, end and separator ## Tags for section start, end and separator
gSectionStart = ">" + "=" * (gLineMaxLength-2) + "<" gSectionStart = ">" + "=" * (gLineMaxLength - 2) + "<"
gSectionEnd = "<" + "=" * (gLineMaxLength-2) + ">" + "\n" gSectionEnd = "<" + "=" * (gLineMaxLength - 2) + ">" + "\n"
gSectionSep = "=" * gLineMaxLength gSectionSep = "=" * gLineMaxLength
## Tags for subsection start, end and separator ## Tags for subsection start, end and separator
gSubSectionStart = ">" + "-" * (gLineMaxLength-2) + "<" gSubSectionStart = ">" + "-" * (gLineMaxLength - 2) + "<"
gSubSectionEnd = "<" + "-" * (gLineMaxLength-2) + ">" gSubSectionEnd = "<" + "-" * (gLineMaxLength - 2) + ">"
gSubSectionSep = "-" * gLineMaxLength gSubSectionSep = "-" * gLineMaxLength
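The three tag families above are just fixed-width rules derived from gLineMaxLength; shown here with a narrower, made-up width so the result fits on one line:

    # What gSectionStart expands to, using width 20 instead of 120 for readability.
    gLineMaxLength = 20
    print(">" + "=" * (gLineMaxLength - 2) + "<")   # >==================<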
@ -813,15 +813,15 @@ class PcdReport(object):
# Report PCD item according to their override relationship # Report PCD item according to their override relationship
# #
if DecMatch and InfMatch: if DecMatch and InfMatch:
FileWrite(File, ' %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '('+Pcd.DatumType+')', PcdValue.strip())) FileWrite(File, ' %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '(' + Pcd.DatumType + ')', PcdValue.strip()))
else: else:
if DscMatch: if DscMatch:
if (Pcd.TokenCName, Key) in self.FdfPcdSet: if (Pcd.TokenCName, Key) in self.FdfPcdSet:
FileWrite(File, ' *F %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '('+Pcd.DatumType+')', PcdValue.strip())) FileWrite(File, ' *F %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '(' + Pcd.DatumType + ')', PcdValue.strip()))
else: else:
FileWrite(File, ' *P %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '('+Pcd.DatumType+')', PcdValue.strip())) FileWrite(File, ' *P %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '(' + Pcd.DatumType + ')', PcdValue.strip()))
else: else:
FileWrite(File, ' *M %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '('+Pcd.DatumType+')', PcdValue.strip())) FileWrite(File, ' *M %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '(' + Pcd.DatumType + ')', PcdValue.strip()))
if TypeName in ('DYNHII', 'DEXHII', 'DYNVPD', 'DEXVPD'): if TypeName in ('DYNHII', 'DEXHII', 'DYNVPD', 'DEXVPD'):
for SkuInfo in Pcd.SkuInfoList.values(): for SkuInfo in Pcd.SkuInfoList.values():
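The *F / *P / *M prefixes above appear to flag where the final PCD value came from (FDF, DSC, or a module-level override). One made-up line rendered with the same format string, purely for illustration:

    # Illustrative report line; the PCD name, type and value are placeholders.
    MaxLen = 28
    print(' *P %-*s: %6s %10s = %-22s'
          % (MaxLen, 'PcdSampleTokenName', 'FIXED', '(UINT32)', '0x00000001'))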


@ -1136,7 +1136,7 @@ class Build():
FunctionList = [] FunctionList = []
if os.path.exists(ImageMapTable): if os.path.exists(ImageMapTable):
OrigImageBaseAddress = 0 OrigImageBaseAddress = 0
ImageMap = open (ImageMapTable, 'r') ImageMap = open(ImageMapTable, 'r')
for LinStr in ImageMap: for LinStr in ImageMap:
if len (LinStr.strip()) == 0: if len (LinStr.strip()) == 0:
continue continue
@ -1149,7 +1149,7 @@ class Build():
StrList = LinStr.split() StrList = LinStr.split()
if len (StrList) > 4: if len (StrList) > 4:
if StrList[3] == 'f' or StrList[3] =='F': if StrList[3] == 'f' or StrList[3] == 'F':
Name = StrList[1] Name = StrList[1]
RelativeAddress = int (StrList[2], 16) - OrigImageBaseAddress RelativeAddress = int (StrList[2], 16) - OrigImageBaseAddress
FunctionList.append ((Name, RelativeAddress)) FunctionList.append ((Name, RelativeAddress))
@ -1273,7 +1273,7 @@ class Build():
if not ImageClass.IsValid: if not ImageClass.IsValid:
EdkLogger.error("build", FILE_PARSE_FAILURE, ExtraData=ImageClass.ErrorInfo) EdkLogger.error("build", FILE_PARSE_FAILURE, ExtraData=ImageClass.ErrorInfo)
ImageInfo = PeImageInfo(Module.Name, Module.Guid, Module.Arch, Module.OutputDir, Module.DebugDir, ImageClass) ImageInfo = PeImageInfo(Module.Name, Module.Guid, Module.Arch, Module.OutputDir, Module.DebugDir, ImageClass)
if Module.ModuleType in ['PEI_CORE', 'PEIM', 'COMBINED_PEIM_DRIVER','PIC_PEIM', 'RELOCATABLE_PEIM', 'DXE_CORE']: if Module.ModuleType in ['PEI_CORE', 'PEIM', 'COMBINED_PEIM_DRIVER', 'PIC_PEIM', 'RELOCATABLE_PEIM', 'DXE_CORE']:
PeiModuleList[Module.MetaFile] = ImageInfo PeiModuleList[Module.MetaFile] = ImageInfo
PeiSize += ImageInfo.Image.Size PeiSize += ImageInfo.Image.Size
elif Module.ModuleType in ['BS_DRIVER', 'DXE_DRIVER', 'UEFI_DRIVER']: elif Module.ModuleType in ['BS_DRIVER', 'DXE_DRIVER', 'UEFI_DRIVER']:
@ -1354,21 +1354,21 @@ class Build():
for PcdInfo in PcdTable: for PcdInfo in PcdTable:
ReturnValue = 0 ReturnValue = 0
if PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE: if PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE:
ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE, str (PeiSize/0x1000)) ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE, str (PeiSize / 0x1000))
elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE: elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE:
ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE, str (BtSize/0x1000)) ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE, str (BtSize / 0x1000))
elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE: elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE:
ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE, str (RtSize/0x1000)) ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE, str (RtSize / 0x1000))
elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE and len (SmmModuleList) > 0: elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE and len (SmmModuleList) > 0:
ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE, str (SmmSize/0x1000)) ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE, str (SmmSize / 0x1000))
if ReturnValue != 0: if ReturnValue != 0:
EdkLogger.error("build", PARAMETER_INVALID, "Patch PCD value failed", ExtraData=ErrorInfo) EdkLogger.error("build", PARAMETER_INVALID, "Patch PCD value failed", ExtraData=ErrorInfo)
MapBuffer.write('PEI_CODE_PAGE_NUMBER = 0x%x\n' % (PeiSize/0x1000)) MapBuffer.write('PEI_CODE_PAGE_NUMBER = 0x%x\n' % (PeiSize / 0x1000))
MapBuffer.write('BOOT_CODE_PAGE_NUMBER = 0x%x\n' % (BtSize/0x1000)) MapBuffer.write('BOOT_CODE_PAGE_NUMBER = 0x%x\n' % (BtSize / 0x1000))
MapBuffer.write('RUNTIME_CODE_PAGE_NUMBER = 0x%x\n' % (RtSize/0x1000)) MapBuffer.write('RUNTIME_CODE_PAGE_NUMBER = 0x%x\n' % (RtSize / 0x1000))
if len (SmmModuleList) > 0: if len (SmmModuleList) > 0:
MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize/0x1000)) MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize / 0x1000))
PeiBaseAddr = TopMemoryAddress - RtSize - BtSize PeiBaseAddr = TopMemoryAddress - RtSize - BtSize
BtBaseAddr = TopMemoryAddress - RtSize BtBaseAddr = TopMemoryAddress - RtSize
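The repeated size / 0x1000 divisions above convert byte counts into 4 KB page counts for the load-fix-address map; a quick check with a made-up size (again '//' stands in for Python 2's integer '/'):

    # 0x1000 bytes is one 4 KB page; the size here is a sample value.
    PeiSize = 0x28000                         # 160 KB of PEI code
    print('PEI_CODE_PAGE_NUMBER = 0x%x' % (PeiSize // 0x1000))   # 0x28, i.e. 40 pages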
@ -1377,7 +1377,7 @@ class Build():
self._RebaseModule (MapBuffer, PeiBaseAddr, PeiModuleList, TopMemoryAddress == 0) self._RebaseModule (MapBuffer, PeiBaseAddr, PeiModuleList, TopMemoryAddress == 0)
self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0) self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0)
self._RebaseModule (MapBuffer, RtBaseAddr, RtModuleList, TopMemoryAddress == 0) self._RebaseModule (MapBuffer, RtBaseAddr, RtModuleList, TopMemoryAddress == 0)
self._RebaseModule (MapBuffer, 0x1000, SmmModuleList, AddrIsOffset = False, ModeIsSmm = True) self._RebaseModule (MapBuffer, 0x1000, SmmModuleList, AddrIsOffset=False, ModeIsSmm=True)
MapBuffer.write('\n\n') MapBuffer.write('\n\n')
sys.stdout.write ("\n") sys.stdout.write ("\n")
sys.stdout.flush() sys.stdout.flush()
@ -1395,7 +1395,7 @@ class Build():
SaveFileOnChange(MapFilePath, MapBuffer.getvalue(), False) SaveFileOnChange(MapFilePath, MapBuffer.getvalue(), False)
MapBuffer.close() MapBuffer.close()
if self.LoadFixAddress != 0: if self.LoadFixAddress != 0:
sys.stdout.write ("\nLoad Module At Fix Address Map file can be found at %s\n" %(MapFilePath)) sys.stdout.write ("\nLoad Module At Fix Address Map file can be found at %s\n" % (MapFilePath))
sys.stdout.flush() sys.stdout.flush()
## Build active platform for different build targets and different tool chains ## Build active platform for different build targets and different tool chains
@ -1529,7 +1529,7 @@ class Build():
BUILD_ERROR, BUILD_ERROR,
"Module for [%s] is not a component of active platform."\ "Module for [%s] is not a component of active platform."\
" Please make sure that the ARCH and inf file path are"\ " Please make sure that the ARCH and inf file path are"\
" given in the same as in [%s]" %\ " given in the same as in [%s]" % \
(', '.join(Wa.ArchList), self.PlatformFile), (', '.join(Wa.ArchList), self.PlatformFile),
ExtraData=self.ModuleFile ExtraData=self.ModuleFile
) )
@ -1874,8 +1874,8 @@ def SingleCheckCallback(option, opt_str, value, parser):
# @retval Args Target of build command # @retval Args Target of build command
# #
def MyOptionParser(): def MyOptionParser():
Parser = OptionParser(description=__copyright__,version=__version__,prog="build.exe",usage="%prog [options] [all|fds|genc|genmake|clean|cleanall|cleanlib|modules|libraries|run]") Parser = OptionParser(description=__copyright__, version=__version__, prog="build.exe", usage="%prog [options] [all|fds|genc|genmake|clean|cleanall|cleanlib|modules|libraries|run]")
Parser.add_option("-a", "--arch", action="append", type="choice", choices=['IA32','X64','IPF','EBC','ARM', 'AARCH64'], dest="TargetArch", Parser.add_option("-a", "--arch", action="append", type="choice", choices=['IA32', 'X64', 'IPF', 'EBC', 'ARM', 'AARCH64'], dest="TargetArch",
help="ARCHS is one of list: IA32, X64, IPF, ARM, AARCH64 or EBC, which overrides target.txt's TARGET_ARCH definition. To specify more archs, please repeat this option.") help="ARCHS is one of list: IA32, X64, IPF, ARM, AARCH64 or EBC, which overrides target.txt's TARGET_ARCH definition. To specify more archs, please repeat this option.")
Parser.add_option("-p", "--platform", action="callback", type="string", dest="PlatformFile", callback=SingleCheckCallback, Parser.add_option("-p", "--platform", action="callback", type="string", dest="PlatformFile", callback=SingleCheckCallback,
help="Build the platform specified by the DSC file name argument, overriding target.txt's ACTIVE_PLATFORM definition.") help="Build the platform specified by the DSC file name argument, overriding target.txt's ACTIVE_PLATFORM definition.")
@ -1917,7 +1917,7 @@ def MyOptionParser():
Parser.add_option("-D", "--define", action="append", type="string", dest="Macros", help="Macro: \"Name [= Value]\".") Parser.add_option("-D", "--define", action="append", type="string", dest="Macros", help="Macro: \"Name [= Value]\".")
Parser.add_option("-y", "--report-file", action="store", dest="ReportFile", help="Create/overwrite the report to the specified filename.") Parser.add_option("-y", "--report-file", action="store", dest="ReportFile", help="Create/overwrite the report to the specified filename.")
Parser.add_option("-Y", "--report-type", action="append", type="choice", choices=['PCD','LIBRARY','FLASH','DEPEX','BUILD_FLAGS','FIXED_ADDRESS', 'EXECUTION_ORDER'], dest="ReportType", default=[], Parser.add_option("-Y", "--report-type", action="append", type="choice", choices=['PCD', 'LIBRARY', 'FLASH', 'DEPEX', 'BUILD_FLAGS', 'FIXED_ADDRESS', 'EXECUTION_ORDER'], dest="ReportType", default=[],
help="Flags that control the type of build report to generate. Must be one of: [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS, EXECUTION_ORDER]. "\ help="Flags that control the type of build report to generate. Must be one of: [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS, EXECUTION_ORDER]. "\
"To specify more than one flag, repeat this option on the command line and the default flag set is [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS]") "To specify more than one flag, repeat this option on the command line and the default flag set is [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS]")
Parser.add_option("-F", "--flag", action="store", type="string", dest="Flag", Parser.add_option("-F", "--flag", action="store", type="string", dest="Flag",
@ -1929,7 +1929,7 @@ def MyOptionParser():
Parser.add_option("--check-usage", action="store_true", dest="CheckUsage", default=False, help="Check usage content of entries listed in INF file.") Parser.add_option("--check-usage", action="store_true", dest="CheckUsage", default=False, help="Check usage content of entries listed in INF file.")
Parser.add_option("--ignore-sources", action="store_true", dest="IgnoreSources", default=False, help="Focus to a binary build and ignore all source files") Parser.add_option("--ignore-sources", action="store_true", dest="IgnoreSources", default=False, help="Focus to a binary build and ignore all source files")
(Opt, Args)=Parser.parse_args() (Opt, Args) = Parser.parse_args()
return (Opt, Args) return (Opt, Args)
## Tool entrance method ## Tool entrance method
@ -1985,13 +1985,13 @@ def Main():
Target = "all" Target = "all"
elif len(Target) >= 2: elif len(Target) >= 2:
EdkLogger.error("build", OPTION_NOT_SUPPORTED, "More than one targets are not supported.", EdkLogger.error("build", OPTION_NOT_SUPPORTED, "More than one targets are not supported.",
ExtraData="Please select one of: %s" %(' '.join(gSupportedTarget))) ExtraData="Please select one of: %s" % (' '.join(gSupportedTarget)))
else: else:
Target = Target[0].lower() Target = Target[0].lower()
if Target not in gSupportedTarget: if Target not in gSupportedTarget:
EdkLogger.error("build", OPTION_NOT_SUPPORTED, "Not supported target [%s]." % Target, EdkLogger.error("build", OPTION_NOT_SUPPORTED, "Not supported target [%s]." % Target,
ExtraData="Please select one of: %s" %(' '.join(gSupportedTarget))) ExtraData="Please select one of: %s" % (' '.join(gSupportedTarget)))
# #
# Check environment variable: EDK_TOOLS_PATH, WORKSPACE, PATH # Check environment variable: EDK_TOOLS_PATH, WORKSPACE, PATH
@ -2069,7 +2069,7 @@ def Main():
if Option != None and Option.debug != None: if Option != None and Option.debug != None:
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc()) EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
else: else:
EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError = False) EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
ReturnCode = FORMAT_INVALID ReturnCode = FORMAT_INVALID
except KeyboardInterrupt: except KeyboardInterrupt:
ReturnCode = ABORT_ERROR ReturnCode = ABORT_ERROR
@ -2110,7 +2110,7 @@ def Main():
BuildDuration = time.gmtime(int(round(FinishTime - StartTime))) BuildDuration = time.gmtime(int(round(FinishTime - StartTime)))
BuildDurationStr = "" BuildDurationStr = ""
if BuildDuration.tm_yday > 1: if BuildDuration.tm_yday > 1:
BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration) + ", %d day(s)"%(BuildDuration.tm_yday - 1) BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration) + ", %d day(s)" % (BuildDuration.tm_yday - 1)
else: else:
BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration) BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration)
if MyBuild != None: if MyBuild != None:
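For reference, the duration formatting above feeds the elapsed seconds through time.gmtime, so any run longer than a day lands in the tm_yday > 1 branch. A worked example with a made-up elapsed time:

    # Worked example of the build-duration formatting above.
    import time
    BuildDuration = time.gmtime(93784)        # 1 day, 2 h, 3 min, 4 s (sample value)
    print(time.strftime("%H:%M:%S", BuildDuration) + ", %d day(s)" % (BuildDuration.tm_yday - 1))
    # prints "02:03:04, 1 day(s)"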