BaseTools: Clean some coding style issues

This patch cleans up some coding style issues, mainly involving space characters.

Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Yonghong Zhu <yonghong.zhu@intel.com>
Reviewed-by: Liming Gao <liming.gao@intel.com>

git-svn-id: https://svn.code.sf.net/p/edk2/code/trunk/edk2@19080 6f19259b-4bc3-4df7-8a09-765794883524
Author: Yonghong Zhu
Date: 2015-12-01 04:22:16 +00:00
Committed by: yzhu52
Parent: 9913dce8ae
Commit: 47fea6afd7
28 changed files with 557 additions and 557 deletions
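The fixes are mechanical whitespace changes: a single space is added after commas and around binary operators such as %, +, and |, and stray spaces around keyword arguments and before closing parentheses are removed. A minimal illustrative sketch of the convention (hypothetical helper name; the real edits are in the hunks below):

# Hypothetical example showing the whitespace convention applied by this patch.
def format_pcd_name(token_space_guid_cname, token_cname):
    # Before the cleanup this expression was written as:
    #   "%s.%s"%(token_space_guid_cname,token_cname)
    # After the cleanup a space surrounds '%' and follows each comma:
    return "%s.%s" % (token_space_guid_cname, token_cname)

print(format_pcd_name("gEfiMdePkgTokenSpaceGuid", "PcdDebugPropertyMask"))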


@@ -232,7 +232,7 @@ class WorkspaceAutoGen(AutoGen):
 # @param SkuId SKU id from command line
 #
 def _Init(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,
 BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None, Fvs=None, Caps=None, SkuId='', UniFlag=None,
 Progress=None, BuildModule=None):
 if Fds is None:
 Fds = []
@@ -280,7 +280,7 @@ class WorkspaceAutoGen(AutoGen):
 # Validate build target
 if self.BuildTarget not in self.Platform.BuildTargets:
 EdkLogger.error("build", PARAMETER_INVALID,
 ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"
 % (self.BuildTarget, " ".join(self.Platform.BuildTargets)))
@@ -288,30 +288,30 @@ class WorkspaceAutoGen(AutoGen):
 # parse FDF file to get PCDs in it, if any
 if not self.FdfFile:
 self.FdfFile = self.Platform.FlashDefinition
 EdkLogger.info("")
 if self.ArchList:
 EdkLogger.info('%-16s = %s' % ("Architecture(s)", ' '.join(self.ArchList)))
 EdkLogger.info('%-16s = %s' % ("Build target", self.BuildTarget))
-EdkLogger.info('%-16s = %s' % ("Toolchain",self.ToolChain))
+EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))
 EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.Platform))
 if BuildModule:
 EdkLogger.info('%-24s = %s' % ("Active Module", BuildModule))
 if self.FdfFile:
 EdkLogger.info('%-24s = %s' % ("Flash Image Definition", self.FdfFile))
 EdkLogger.verbose("\nFLASH_DEFINITION = %s" % self.FdfFile)
 if Progress:
 Progress.Start("\nProcessing meta-data")
 if self.FdfFile:
 #
 # Mark now build in AutoGen Phase
 #
 GlobalData.gAutoGenPhase = True
 Fdf = FdfParser(self.FdfFile.Path)
 Fdf.ParseFile()
 GlobalData.gFdfParser = Fdf
@@ -336,7 +336,7 @@ class WorkspaceAutoGen(AutoGen):
 if self.CapTargetList:
 EdkLogger.info("No flash definition file found. Capsule [%s] will be ignored." % " ".join(self.CapTargetList))
 self.CapTargetList = []
 # apply SKU and inject PCDs from Flash Definition file
 for Arch in self.ArchList:
 Platform = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]
@@ -391,12 +391,12 @@ class WorkspaceAutoGen(AutoGen):
 Pa.CollectPlatformDynamicPcds()
 Pa.CollectFixedAtBuildPcds()
 self.AutoGenObjectList.append(Pa)
 #
 # Check PCDs token value conflict in each DEC file.
 #
 self._CheckAllPcdsTokenValueConflict()
 #
 # Check PCD type and definition between DSC and DEC
 #
@@ -425,7 +425,7 @@ class WorkspaceAutoGen(AutoGen):
 #
 # Get INF file GUID
 #
 InfFoundFlag = False
 for Pa in self.AutoGenObjectList:
 if InfFoundFlag:
 break
@@ -436,9 +436,9 @@ class WorkspaceAutoGen(AutoGen):
 _GuidDict[Module.Guid.upper()] = FfsFile
 break
 else:
 EdkLogger.error("build",
 FORMAT_INVALID,
-"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,
+"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
 FfsFile.CurrentLineContent,
 _GuidDict[Module.Guid.upper()].CurrentLineNum,
 _GuidDict[Module.Guid.upper()].CurrentLineContent,
@@ -452,7 +452,7 @@ class WorkspaceAutoGen(AutoGen):
 InfPath = NormPath(FfsFile.InfFileName)
 if not os.path.exists(InfPath):
 EdkLogger.error('build', GENFDS_ERROR, "Non-existant Module %s !" % (FfsFile.InfFileName))
 PathClassObj = PathClass(FfsFile.InfFileName, self.WorkspaceDir)
 #
 # Here we just need to get FILE_GUID from INF file, use 'COMMON' as ARCH attribute. and use
@@ -462,19 +462,19 @@ class WorkspaceAutoGen(AutoGen):
 if not InfObj.Guid.upper() in _GuidDict.keys():
 _GuidDict[InfObj.Guid.upper()] = FfsFile
 else:
 EdkLogger.error("build",
 FORMAT_INVALID,
-"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,
+"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
 FfsFile.CurrentLineContent,
 _GuidDict[InfObj.Guid.upper()].CurrentLineNum,
 _GuidDict[InfObj.Guid.upper()].CurrentLineContent,
 InfObj.Guid.upper()),
 ExtraData=self.FdfFile)
 InfFoundFlag = False
 if FfsFile.NameGuid != None:
 _CheckPCDAsGuidPattern = re.compile("^PCD\(.+\..+\)$")
 #
 # If the NameGuid reference a PCD name.
 # The style must match: PCD(xxxx.yyy)
@@ -493,51 +493,51 @@ class WorkspaceAutoGen(AutoGen):
 # First convert from CFormatGuid to GUID string
 #
 _PcdGuidString = GuidStructureStringToGuidString(PcdItem.DefaultValue)
 if not _PcdGuidString:
 #
 # Then try Byte array.
 #
 _PcdGuidString = GuidStructureByteArrayToGuidString(PcdItem.DefaultValue)
 if not _PcdGuidString:
 #
 # Not Byte array or CFormat GUID, raise error.
 #
 EdkLogger.error("build",
 FORMAT_INVALID,
-"The format of PCD value is incorrect. PCD: %s , Value: %s\n"%(_PcdName, PcdItem.DefaultValue),
+"The format of PCD value is incorrect. PCD: %s , Value: %s\n" % (_PcdName, PcdItem.DefaultValue),
 ExtraData=self.FdfFile)
 if not _PcdGuidString.upper() in _GuidDict.keys():
 _GuidDict[_PcdGuidString.upper()] = FfsFile
 PcdFoundFlag = True
 break
 else:
 EdkLogger.error("build",
 FORMAT_INVALID,
-"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,
+"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
 FfsFile.CurrentLineContent,
 _GuidDict[_PcdGuidString.upper()].CurrentLineNum,
 _GuidDict[_PcdGuidString.upper()].CurrentLineContent,
 FfsFile.NameGuid.upper()),
 ExtraData=self.FdfFile)
 if not FfsFile.NameGuid.upper() in _GuidDict.keys():
 _GuidDict[FfsFile.NameGuid.upper()] = FfsFile
 else:
 #
 # Two raw file GUID conflict.
 #
 EdkLogger.error("build",
 FORMAT_INVALID,
-"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s"%(FfsFile.CurrentLineNum,
+"Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
 FfsFile.CurrentLineContent,
 _GuidDict[FfsFile.NameGuid.upper()].CurrentLineNum,
 _GuidDict[FfsFile.NameGuid.upper()].CurrentLineContent,
 FfsFile.NameGuid.upper()),
 ExtraData=self.FdfFile)
 def _CheckPcdDefineAndType(self):
 PcdTypeList = [
@@ -552,17 +552,17 @@ class WorkspaceAutoGen(AutoGen):
 # Key of DSC's Pcds dictionary is PcdCName, TokenSpaceGuid
 for Pcd in Pa.Platform.Pcds:
 PcdType = Pa.Platform.Pcds[Pcd].Type
 # If no PCD type, this PCD comes from FDF
 if not PcdType:
 continue
 # Try to remove Hii and Vpd suffix
 if PcdType.startswith("DynamicEx"):
 PcdType = "DynamicEx"
 elif PcdType.startswith("Dynamic"):
 PcdType = "Dynamic"
 for Package in Pa.PackageList:
 # Key of DEC's Pcds dictionary is PcdCName, TokenSpaceGuid, PcdType
 if (Pcd[0], Pcd[1], PcdType) in Package.Pcds:
@@ -640,7 +640,7 @@ class WorkspaceAutoGen(AutoGen):
 # BuildCommand should be all the same. So just get one from platform AutoGen
 self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
 return self._BuildCommand
 ## Check the PCDs token value conflict in each DEC file.
 #
 # Will cause build break and raise error message while two PCDs conflict.
@@ -672,12 +672,12 @@ class WorkspaceAutoGen(AutoGen):
 #
 # Sort same token value PCD list with TokenGuid and TokenCName
 #
-SameTokenValuePcdList.sort(lambda x, y: cmp("%s.%s"%(x.TokenSpaceGuidCName, x.TokenCName), "%s.%s"%(y.TokenSpaceGuidCName, y.TokenCName)))
+SameTokenValuePcdList.sort(lambda x, y: cmp("%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName), "%s.%s" % (y.TokenSpaceGuidCName, y.TokenCName)))
 SameTokenValuePcdListCount = 0
 while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):
 TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]
 TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1]
 if (TemListItem.TokenSpaceGuidCName == TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName != TemListItemNext.TokenCName):
 EdkLogger.error(
 'build',
@@ -689,13 +689,13 @@ class WorkspaceAutoGen(AutoGen):
 SameTokenValuePcdListCount += 1
 Count += SameTokenValuePcdListCount
 Count += 1
 PcdList = Package.Pcds.values()
-PcdList.sort(lambda x, y: cmp("%s.%s"%(x.TokenSpaceGuidCName, x.TokenCName), "%s.%s"%(y.TokenSpaceGuidCName, y.TokenCName)))
+PcdList.sort(lambda x, y: cmp("%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName), "%s.%s" % (y.TokenSpaceGuidCName, y.TokenCName)))
 Count = 0
 while (Count < len(PcdList) - 1) :
 Item = PcdList[Count]
 ItemNext = PcdList[Count + 1]
 #
 # Check PCDs with same TokenSpaceGuidCName.TokenCName have same token value as well.
 #
@@ -786,7 +786,7 @@ class PlatformAutoGen(AutoGen):
 "0x01001" : 3, # ******_TOOLCHAIN_****_***********_ATTRIBUTE
 "0x10001" : 2, # TARGET_*********_****_***********_ATTRIBUTE
 "0x00001" : 1} # ******_*********_****_***********_ATTRIBUTE (Lowest)
 ## The real constructor of PlatformAutoGen
 #
 # This method is not supposed to be called by users of PlatformAutoGen. It's
@@ -960,8 +960,8 @@ class PlatformAutoGen(AutoGen):
 #GuidValue.update(M.Guids)
 self.Platform.Modules[F].M = M
-for PcdFromModule in M.ModulePcdList+M.LibraryPcdList:
+for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:
 # make sure that the "VOID*" kind of datum has MaxDatumSize set
 if PcdFromModule.DatumType == "VOID*" and PcdFromModule.MaxDatumSize in [None, '']:
 NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, F))
@@ -1111,9 +1111,9 @@ class PlatformAutoGen(AutoGen):
 if (self.Workspace.ArchList[-1] == self.Arch):
 for Pcd in self._DynamicPcdList:
 # just pick the a value to determine whether is unicode string type
 Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
 Sku.VpdOffset = Sku.VpdOffset.strip()
 PcdValue = Sku.DefaultValue
 if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):
 # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex
@@ -1124,10 +1124,10 @@ class PlatformAutoGen(AutoGen):
 else:
 OtherPcdArray.append(Pcd)
 if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
 VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd
 PlatformPcds = self.Platform.Pcds.keys()
 PlatformPcds.sort()
 #
 # Add VPD type PCD into VpdFile and determine whether the VPD PCD need to be fixed up.
 #
@@ -1145,8 +1145,8 @@ class PlatformAutoGen(AutoGen):
 if self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == '':
 EdkLogger.error("Build", FILE_NOT_FOUND, \
 "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
 #
 # Fix the PCDs define in VPD PCD section that never referenced by module.
 # An example is PCD for signature usage.
@@ -1161,7 +1161,7 @@ class PlatformAutoGen(AutoGen):
 if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
 (VpdPcd.TokenCName == DscPcdEntry.TokenCName):
 FoundFlag = True
 # Not found, it should be signature
 if not FoundFlag :
 # just pick the a value to determine whether is unicode string type
@@ -1211,7 +1211,7 @@ class PlatformAutoGen(AutoGen):
 VpdFile.GetCount() != 0:
 EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
 "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
 if VpdFile.GetCount() != 0:
 DscTimeStamp = self.Platform.MetaFile.TimeStamp
 FvPath = os.path.join(self.BuildDir, "FV")
@@ -1220,14 +1220,14 @@ class PlatformAutoGen(AutoGen):
 os.makedirs(FvPath)
 except:
 EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
 VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
 if not os.path.exists(VpdFilePath) or os.path.getmtime(VpdFilePath) < DscTimeStamp:
 VpdFile.Write(VpdFilePath)
 # retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.
 BPDGToolName = None
 for ToolDef in self.ToolDefinition.values():
@@ -1241,13 +1241,13 @@ class PlatformAutoGen(AutoGen):
 VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
 else:
 EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
 # Process VPD map file generated by third party BPDG tool
 if NeedProcessVpdMapFile:
 VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
 if os.path.exists(VpdMapFilePath):
 VpdFile.Read(VpdMapFilePath)
 # Fixup "*" offset
 for Pcd in self._DynamicPcdList:
 # just pick the a value to determine whether is unicode string type
@@ -1258,9 +1258,9 @@ class PlatformAutoGen(AutoGen):
 i += 1
 else:
 EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
 # Delete the DynamicPcdList At the last time enter into this function
 del self._DynamicPcdList[:]
 self._DynamicPcdList.extend(UnicodePcdArray)
 self._DynamicPcdList.extend(HiiPcdArray)
 self._DynamicPcdList.extend(OtherPcdArray)
@@ -1471,10 +1471,10 @@ class PlatformAutoGen(AutoGen):
 else:
 if self._BuildRule._FileVersion < AutoGenReqBuildRuleVerNum :
 # If Build Rule's version is less than the version number required by the tools, halting the build.
 EdkLogger.error("build", AUTOGEN_ERROR,
 ExtraData="The version number [%s] of build_rule.txt is less than the version number required by the AutoGen.(the minimum required version number is [%s])"\
 % (self._BuildRule._FileVersion, AutoGenReqBuildRuleVerNum))
 return self._BuildRule
 ## Summarize the packages used by modules in this platform
@@ -1534,28 +1534,28 @@ class PlatformAutoGen(AutoGen):
 EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
 self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
 TokenNumber += 1
 for Pcd in self.DynamicPcdList:
 if Pcd.Phase == "PEI":
 if Pcd.Type in ["DynamicEx", "DynamicExDefault", "DynamicExVpd", "DynamicExHii"]:
 EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
 self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
 TokenNumber += 1
 for Pcd in self.DynamicPcdList:
 if Pcd.Phase == "DXE":
 if Pcd.Type in ["Dynamic", "DynamicDefault", "DynamicVpd", "DynamicHii"]:
 EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
 self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
 TokenNumber += 1
 for Pcd in self.DynamicPcdList:
 if Pcd.Phase == "DXE":
 if Pcd.Type in ["DynamicEx", "DynamicExDefault", "DynamicExVpd", "DynamicExHii"]:
 EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
 self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
 TokenNumber += 1
 for Pcd in self.NonDynamicPcdList:
 self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
 TokenNumber += 1
@@ -1787,7 +1787,7 @@ class PlatformAutoGen(AutoGen):
 elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\
 and (ToPcd.Type != FromPcd.Type) and (ToPcd.Type in FromPcd.Type):
 if ToPcd.Type.strip() == "DynamicEx":
 ToPcd.Type = FromPcd.Type
 elif ToPcd.Type not in [None, ''] and FromPcd.Type not in [None, ''] \
 and ToPcd.Type != FromPcd.Type:
 EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
@@ -1850,11 +1850,11 @@ class PlatformAutoGen(AutoGen):
 #
 def ApplyPcdSetting(self, Module, Pcds):
 # for each PCD in module
-for Name,Guid in Pcds:
-PcdInModule = Pcds[Name,Guid]
+for Name, Guid in Pcds:
+PcdInModule = Pcds[Name, Guid]
 # find out the PCD setting in platform
-if (Name,Guid) in self.Platform.Pcds:
-PcdInPlatform = self.Platform.Pcds[Name,Guid]
+if (Name, Guid) in self.Platform.Pcds:
+PcdInPlatform = self.Platform.Pcds[Name, Guid]
 else:
 PcdInPlatform = None
 # then override the settings if any
@@ -1927,8 +1927,8 @@ class PlatformAutoGen(AutoGen):
 # @retval Value Priority value based on the priority list.
 #
 def CalculatePriorityValue(self, Key):
 Target, ToolChain, Arch, CommandType, Attr = Key.split('_')
 PriorityValue = 0x11111
 if Target == "*":
 PriorityValue &= 0x01111
 if ToolChain == "*":
@@ -1939,9 +1939,9 @@ class PlatformAutoGen(AutoGen):
 PriorityValue &= 0x11101
 if Attr == "*":
 PriorityValue &= 0x11110
-return self.PrioList["0x%0.5x"%PriorityValue]
+return self.PrioList["0x%0.5x" % PriorityValue]
 ## Expand * in build option key
 #
@@ -1953,7 +1953,7 @@ class PlatformAutoGen(AutoGen):
 BuildOptions = {}
 FamilyMatch = False
 FamilyIsNull = True
 OverrideList = {}
 #
 # Construct a list contain the build options which need override.
@@ -1970,7 +1970,7 @@ class PlatformAutoGen(AutoGen):
 if ToolChain == self.ToolChain or ToolChain == "*":
 if Arch == self.Arch or Arch == "*":
 if Options[Key].startswith("="):
 if OverrideList.get(Key[1]) != None:
 OverrideList.pop(Key[1])
 OverrideList[Key[1]] = Options[Key]
@@ -1978,9 +1978,9 @@ class PlatformAutoGen(AutoGen):
 # Use the highest priority value.
 #
 if (len(OverrideList) >= 2):
 KeyList = OverrideList.keys()
 for Index in range(len(KeyList)):
 NowKey = KeyList[Index]
 Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
 for Index1 in range(len(KeyList) - Index - 1):
 NextKey = KeyList[Index1 + Index + 1]
@@ -1994,10 +1994,10 @@ class PlatformAutoGen(AutoGen):
 if CommandType1 == CommandType2 or CommandType1 == "*" or CommandType2 == "*":
 if Attr1 == Attr2 or Attr1 == "*" or Attr2 == "*":
 if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):
 if Options.get((self.BuildRuleFamily, NextKey)) != None:
 Options.pop((self.BuildRuleFamily, NextKey))
 else:
 if Options.get((self.BuildRuleFamily, NowKey)) != None:
 Options.pop((self.BuildRuleFamily, NowKey))
 for Key in Options:
@@ -2045,7 +2045,7 @@ class PlatformAutoGen(AutoGen):
 Family = Key[0]
 Target, Tag, Arch, Tool, Attr = Key[1].split("_")
 # if tool chain family doesn't match, skip it
-if Tool not in self.ToolDefinition or Family =="":
+if Tool not in self.ToolDefinition or Family == "":
 continue
 # option has been added before
 if Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_FAMILY]:
@@ -2637,9 +2637,9 @@ class ModuleAutoGen(AutoGen):
 # is the former use /I , the Latter used -I to specify include directories
 #
 if self.PlatformInfo.ToolChainFamily in ('MSFT'):
-gBuildOptIncludePattern = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE|re.DOTALL)
+gBuildOptIncludePattern = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
 elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
-gBuildOptIncludePattern = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE|re.DOTALL)
+gBuildOptIncludePattern = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
 else:
 #
 # New ToolChainFamily, don't known whether there is option to specify include directories
@@ -2673,11 +2673,11 @@ class ModuleAutoGen(AutoGen):
 if self.AutoGenVersion >= 0x00010005 and len(IncPathList) > 0:
 for Path in IncPathList:
 if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
 ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
 EdkLogger.error("build",
 PARAMETER_INVALID,
-ExtraData = ErrMsg,
-File = str(self.MetaFile))
+ExtraData=ErrMsg,
+File=str(self.MetaFile))
 BuildOptionIncPathList += IncPathList
@@ -2797,7 +2797,7 @@ class ModuleAutoGen(AutoGen):
 if File.IsBinary and File == Source and self._BinaryFileList != None and File in self._BinaryFileList:
 # Skip all files that are not binary libraries
 if not self.IsLibrary:
 continue
 RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
 elif FileType in self.BuildRules:
 RuleObject = self.BuildRules[FileType]
@@ -3215,7 +3215,7 @@ class ModuleAutoGen(AutoGen):
 # Also find all packages that the DynamicEx PCDs depend on
 Pcds = []
 PatchablePcds = {}
 Packages = []
 PcdCheckList = []
 PcdTokenSpaceList = []
 for Pcd in self.ModulePcdList + self.LibraryPcdList:
@@ -3292,7 +3292,7 @@ class ModuleAutoGen(AutoGen):
 'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
 'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
 'module_arch' : self.Arch,
-'package_item' : ['%s' % (Package.MetaFile.File.replace('\\','/')) for Package in Packages],
+'package_item' : ['%s' % (Package.MetaFile.File.replace('\\', '/')) for Package in Packages],
 'binary_item' : [],
 'patchablepcd_item' : [],
 'pcd_item' : [],
@@ -3316,27 +3316,27 @@ class ModuleAutoGen(AutoGen):
 if 'PI_SPECIFICATION_VERSION' in self.Specification:
 AsBuiltInfDict['module_pi_specification_version'] += [self.Specification['PI_SPECIFICATION_VERSION']]
-OutputDir = self.OutputDir.replace('\\','/').strip('/')
+OutputDir = self.OutputDir.replace('\\', '/').strip('/')
 if self.ModuleType in ['BASE', 'USER_DEFINED']:
 for Item in self.CodaTargetList:
-File = Item.Target.Path.replace('\\','/').strip('/').replace(OutputDir,'').strip('/')
+File = Item.Target.Path.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
 if Item.Target.Ext.lower() == '.aml':
 AsBuiltInfDict['binary_item'] += ['ASL|' + File]
 elif Item.Target.Ext.lower() == '.acpi':
 AsBuiltInfDict['binary_item'] += ['ACPI|' + File]
 else:
 AsBuiltInfDict['binary_item'] += ['BIN|' + File]
 else:
 for Item in self.CodaTargetList:
-File = Item.Target.Path.replace('\\','/').strip('/').replace(OutputDir,'').strip('/')
+File = Item.Target.Path.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
 if Item.Target.Ext.lower() == '.efi':
 AsBuiltInfDict['binary_item'] += ['PE32|' + self.Name + '.efi']
 else:
 AsBuiltInfDict['binary_item'] += ['BIN|' + File]
 if self.DepexGenerated:
 if self.ModuleType in ['PEIM']:
 AsBuiltInfDict['binary_item'] += ['PEI_DEPEX|' + self.Name + '.depex']
-if self.ModuleType in ['DXE_DRIVER','DXE_RUNTIME_DRIVER','DXE_SAL_DRIVER','UEFI_DRIVER']:
+if self.ModuleType in ['DXE_DRIVER', 'DXE_RUNTIME_DRIVER', 'DXE_SAL_DRIVER', 'UEFI_DRIVER']:
 AsBuiltInfDict['binary_item'] += ['DXE_DEPEX|' + self.Name + '.depex']
 if self.ModuleType in ['DXE_SMM_DRIVER']:
 AsBuiltInfDict['binary_item'] += ['SMM_DEPEX|' + self.Name + '.depex']


@@ -388,7 +388,7 @@ class BuildRule:
 # find the build_rule_version
 if Line and Line[0] == "#" and Line.find(TAB_BUILD_RULE_VERSION) <> -1:
-if Line.find("=") <> -1 and Line.find("=") < (len(Line)-1) and (Line[(Line.find("=") + 1):]).split():
+if Line.find("=") <> -1 and Line.find("=") < (len(Line) - 1) and (Line[(Line.find("=") + 1):]).split():
 self._FileVersion = (Line[(Line.find("=") + 1):]).split()[0]
 # skip empty or comment line
 if Line == "" or Line[0] == "#":
@@ -470,16 +470,16 @@ class BuildRule:
 if TokenList[0] == "BUILD":
 if len(TokenList) == 1:
 EdkLogger.error("build", FORMAT_INVALID, "Invalid rule section",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 FileType = TokenList[1]
 if FileType == '':
 EdkLogger.error("build", FORMAT_INVALID, "No file type given",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 if self._FileTypePattern.match(FileType) == None:
-EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex+1,
+EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
 ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type")
 # new format: File-Type.Build-Type.Arch
 else:
@@ -488,7 +488,7 @@ class BuildRule:
 elif FileType != TokenList[0]:
 EdkLogger.error("build", FORMAT_INVALID,
 "Different file types are not allowed in the same rule section",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 if len(TokenList) > 1:
 BuildType = TokenList[1]
@@ -502,12 +502,12 @@ class BuildRule:
 if 'COMMON' in self._BuildTypeList and len(self._BuildTypeList) > 1:
 EdkLogger.error("build", FORMAT_INVALID,
 "Specific build types must not be mixed with common one",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 if 'COMMON' in self._ArchList and len(self._ArchList) > 1:
 EdkLogger.error("build", FORMAT_INVALID,
 "Specific ARCH must not be mixed with common one",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 self._FileType = FileType
@@ -531,7 +531,7 @@ class BuildRule:
 elif SectionType != Type:
 EdkLogger.error("build", FORMAT_INVALID,
 "Two different section types are not allowed in the same sub-section",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 if len(TokenList) > 1:
@@ -548,10 +548,10 @@ class BuildRule:
 if 'COMMON' in FamilyList and len(FamilyList) > 1:
 EdkLogger.error("build", FORMAT_INVALID,
 "Specific tool chain family should not be mixed with general one",
-File=self.RuleFile, Line=LineIndex+1,
+File=self.RuleFile, Line=LineIndex + 1,
 ExtraData=self.RuleContent[LineIndex])
 if self._State not in self._StateHandler:
-EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex+1,
+EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
 ExtraData="Unknown subsection: %s" % self.RuleContent[LineIndex])
 ## Parse <InputFile> sub-section
 #


@@ -286,7 +286,7 @@ class DependencyExpression:
 # don't generate depex if only TRUE operand left
 if self.ModuleType == 'PEIM' and len(NewOperand) == 1 and NewOperand[0] == 'TRUE':
 self.PostfixNotation = []
 return
 # don't generate depex if all operands are architecture protocols
 if self.ModuleType in ['UEFI_DRIVER', 'DXE_DRIVER', 'DXE_RUNTIME_DRIVER', 'DXE_SAL_DRIVER', 'DXE_SMM_DRIVER'] and \
@@ -424,7 +424,7 @@ def Main():
 Dpx = DependencyExpression(DxsString, Option.ModuleType, Option.Optimize)
 if Option.OutputFile != None:
 FileChangeFlag = Dpx.Generate(Option.OutputFile)
 if not FileChangeFlag and DxsFile:
 #
 # Touch the output file if its time stamp is older than the original
 # DXS file to avoid re-invoke this tool for the dependency check in build rule.


@@ -27,7 +27,7 @@ from BuildEngine import *
 import Common.GlobalData as GlobalData
 ## Regular expression for finding header file inclusions
-gIncludePattern = re.compile(r"^[ \t]*#?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE|re.UNICODE|re.IGNORECASE)
+gIncludePattern = re.compile(r"^[ \t]*#?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE | re.UNICODE | re.IGNORECASE)
 ## Regular expression for matching macro used in header file inclusion
 gMacroPattern = re.compile("([_A-Z][_A-Z0-9]*)[ \t]*\((.+)\)", re.UNICODE)
@@ -499,7 +499,7 @@ cleanlib:
 # convert source files and binary files to build targets
 self.ResultFileList = [str(T.Target) for T in self._AutoGenObject.CodaTargetList]
 if len(self.ResultFileList) == 0 and len(self._AutoGenObject.SourceFileList) <> 0:
 EdkLogger.error("build", AUTOGEN_ERROR, "Nothing to build",
 ExtraData="[%s]" % str(self._AutoGenObject))
@@ -520,9 +520,9 @@ cleanlib:
 FileMacro = ""
 IncludePathList = []
 for P in self._AutoGenObject.IncludePathList:
-IncludePathList.append(IncPrefix+self.PlaceMacro(P, self.Macros))
+IncludePathList.append(IncPrefix + self.PlaceMacro(P, self.Macros))
 if FileBuildRule.INC_LIST_MACRO in self.ListFileMacros:
-self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix+P)
+self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix + P)
 FileMacro += self._FILE_MACRO_TEMPLATE.Replace(
 {
 "macro_name" : "INC",
@@ -533,7 +533,7 @@ cleanlib:
 # Generate macros used to represent files containing list of input files
 for ListFileMacro in self.ListFileMacros:
-ListFileName = os.path.join(self._AutoGenObject.OutputDir, "%s.lst" % ListFileMacro.lower()[:len(ListFileMacro)-5])
+ListFileName = os.path.join(self._AutoGenObject.OutputDir, "%s.lst" % ListFileMacro.lower()[:len(ListFileMacro) - 5])
 FileMacroList.append("%s = %s" % (ListFileMacro, ListFileName))
 SaveFileOnChange(
 ListFileName,
@@ -767,7 +767,7 @@ cleanlib:
 try:
 Fd = open(F.Path, 'r')
 except BaseException, X:
-EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path+"\n\t"+str(X))
+EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))
 FileContent = Fd.read()
 Fd.close()


@@ -784,11 +784,11 @@ def BuildExDataBase(Dict):
 DbTotal = [InitValueUint64, VardefValueUint64, InitValueUint32, VardefValueUint32, VpdHeadValue, ExMapTable,
 LocalTokenNumberTable, GuidTable, StringHeadValue, PcdNameOffsetTable,VariableTable,SkuTable, StringTableLen, PcdTokenTable,PcdCNameTable,
-SizeTableValue, InitValueUint16, VardefValueUint16,InitValueUint8, VardefValueUint8, InitValueBoolean,
+SizeTableValue, InitValueUint16, VardefValueUint16, InitValueUint8, VardefValueUint8, InitValueBoolean,
 VardefValueBoolean, SkuidValue, SkuIndexValue, UnInitValueUint64, UnInitValueUint32, UnInitValueUint16, UnInitValueUint8, UnInitValueBoolean]
 DbItemTotal = [DbInitValueUint64, DbVardefValueUint64, DbInitValueUint32, DbVardefValueUint32, DbVpdHeadValue, DbExMapTable,
 DbLocalTokenNumberTable, DbGuidTable, DbStringHeadValue, DbPcdNameOffsetTable,DbVariableTable,DbSkuTable, DbStringTableLen, DbPcdTokenTable, DbPcdCNameTable,
-DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16,DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
+DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16, DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
 DbVardefValueBoolean, DbSkuidValue, DbSkuIndexValue, DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean]
 # SkuidValue is the last table in the init table items
@@ -1343,7 +1343,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
 Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
 elif Sku.DefaultValue[0] == '"':
 DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
-Size = len(Sku.DefaultValue) -2 + 1
+Size = len(Sku.DefaultValue) - 2 + 1
 Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
 elif Sku.DefaultValue[0] == '{':
 DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
@@ -1375,7 +1375,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
 Pcd.InitString = 'INIT'
 else:
 if int(Sku.DefaultValue, 0) != 0:
 Pcd.InitString = 'INIT'
 #
 # For UNIT64 type PCD's value, ULL should be append to avoid
 # warning under linux building environment.


@@ -113,7 +113,7 @@ def DecToHexStr(Dec, Digit = 8):
 # @retval: A list for formatted hex string
 #
 def DecToHexList(Dec, Digit = 8):
-Hex = eval("'%0" + str(Digit) + "X' % int(Dec)" )
+Hex = eval("'%0" + str(Digit) + "X' % int(Dec)")
 List = []
 for Bit in range(Digit - 2, -1, -2):
 List.append(HexHeader + Hex[Bit:Bit + 2])
@@ -192,7 +192,7 @@ def CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
 Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
 UnusedStr = WriteLine(UnusedStr, Line)
-Str = ''.join([Str,UnusedStr])
+Str = ''.join([Str, UnusedStr])
 Str = WriteLine(Str, '')
 if IsCompatibleMode or UniGenCFlag:
@@ -235,7 +235,7 @@ def CreateCFileHeader():
 #
 def CreateBinBuffer(BinBuffer, Array):
 for Item in Array:
-BinBuffer.write(pack("B", int(Item,16)))
+BinBuffer.write(pack("B", int(Item, 16)))
 ## Create a formatted string all items in an array
 #
@@ -258,7 +258,7 @@ def CreateArrayItem(Array, Width = 16):
 Index = Index + 1
 else:
 ArrayItem = WriteLine(ArrayItem, Line)
 Line = ' ' + Item + ', '
 Index = 1
 ArrayItem = Write(ArrayItem, Line.rstrip())
@@ -320,7 +320,7 @@ def GetFilteredLanguage(UniLanguageList, LanguageFilterList):
 if PrimaryTag == UniLanguagePrimaryTag:
 if UniLanguage not in UniLanguageListFiltered:
 UniLanguageListFiltered += [UniLanguage]
 break
 else:
 # Here is rule 3 for "get best language"
@@ -368,7 +368,7 @@ def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer,
 UniLanguageList = []
 for IndexI in range(len(UniObjectClass.LanguageDef)):
 UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]]
 UniLanguageListFiltered = GetFilteredLanguage(UniLanguageList, LanguageFilterList)
@@ -450,14 +450,14 @@ def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer,
 if UniBinBuffer:
 CreateBinBuffer (UniBinBuffer, List)
 UniBinBuffer.write (StringBuffer.getvalue())
-UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END,16)))
+UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END, 16)))
 StringBuffer.close()
 #
 # Create line for string variable name
 # "unsigned char $(BaseName)Strings[] = {"
 #
-AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName + COMMON_FILE_NAME + '[] = {\n' )
+AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName + COMMON_FILE_NAME + '[] = {\n')
 if IsCompatibleMode:
 #
@@ -618,13 +618,13 @@ def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, Ski
 # Write an item
 #
 def Write(Target, Item):
-return ''.join([Target,Item])
+return ''.join([Target, Item])
 #
 # Write an item with a break line
 #
 def WriteLine(Target, Item):
-return ''.join([Target,Item,'\n'])
+return ''.join([Target, Item, '\n'])
 # This acts like the main() function for the script, unless it is 'import'ed into another
 # script.


@ -248,7 +248,7 @@ class UniFileClassObject(object):
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File); EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File);
LineNo = GetLineNo(FileIn, Line, False) LineNo = GetLineNo(FileIn, Line, False)
EdkLogger.error("Unicode File Parser", PARSER_ERROR, "Wrong language definition", EdkLogger.error("Unicode File Parser", PARSER_ERROR, "Wrong language definition",
ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line, File = File, Line = LineNo) ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line, File=File, Line=LineNo)
else: else:
LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File) LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File)
LangPrintName = Lang[2] LangPrintName = Lang[2]
@ -352,7 +352,7 @@ class UniFileClassObject(object):
if Name != '': if Name != '':
MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE) MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
if MatchString == None or MatchString.end(0) != len(Name): if MatchString == None or MatchString.end(0) != len(Name):
EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' %(Name, self.File)) EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' % (Name, self.File))
LanguageList = Item.split(u'#language ') LanguageList = Item.split(u'#language ')
for IndexI in range(len(LanguageList)): for IndexI in range(len(LanguageList)):
if IndexI == 0: if IndexI == 0:
@ -512,7 +512,7 @@ class UniFileClassObject(object):
if not self.IsCompatibleMode and Name != '': if not self.IsCompatibleMode and Name != '':
MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE) MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
if MatchString == None or MatchString.end(0) != len(Name): if MatchString == None or MatchString.end(0) != len(Name):
EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' %(Name, self.File)) EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' % (Name, self.File))
self.AddStringToList(Name, Language, Value) self.AddStringToList(Name, Language, Value)
continue continue
@ -571,7 +571,7 @@ class UniFileClassObject(object):
ItemIndexInList = self.OrderedStringDict[Language][Name] ItemIndexInList = self.OrderedStringDict[Language][Name]
Item = self.OrderedStringList[Language][ItemIndexInList] Item = self.OrderedStringList[Language][ItemIndexInList]
Item.UpdateValue(Value) Item.UpdateValue(Value)
Item.UseOtherLangDef = '' Item.UseOtherLangDef = ''
if IsAdded: if IsAdded:
Token = len(self.OrderedStringList[Language]) Token = len(self.OrderedStringList[Language])
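
The token-name check that appears twice in this file relies on re.match anchoring only at the start of the string, so the extra MatchString.end(0) != len(Name) comparison is what actually rejects a name that merely begins with valid characters. A quick standalone check of that behaviour:

    import re

    # A UNI string token name is accepted only if [A-Z0-9_]+ consumes the whole name.
    def IsValidTokenName(Name):
        MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
        return MatchString is not None and MatchString.end(0) == len(Name)

    for Name in ['STR_LANGUAGE_SELECT', 'STR_Language_Select', '1STR_OK']:
        print('%-20s %s' % (Name, IsValidTokenName(Name)))
    # STR_LANGUAGE_SELECT  True
    # STR_Language_Select  False  (lowercase characters stop the match early)
    # 1STR_OK              True   (digits and '_' are allowed by the pattern)
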
@ -48,19 +48,19 @@ class PcdEntry:
self.PcdBinSize = PcdBinSize self.PcdBinSize = PcdBinSize
if self.PcdValue == '' : if self.PcdValue == '' :
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid PCD format(Name: %s File: %s line: %s) , no Value specified!" %(self.PcdCName, self.FileName, self.Lineno)) "Invalid PCD format(Name: %s File: %s line: %s) , no Value specified!" % (self.PcdCName, self.FileName, self.Lineno))
if self.PcdOffset == '' : if self.PcdOffset == '' :
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid PCD format(Name: %s File: %s Line: %s) , no Offset specified!" %(self.PcdCName, self.FileName, self.Lineno)) "Invalid PCD format(Name: %s File: %s Line: %s) , no Offset specified!" % (self.PcdCName, self.FileName, self.Lineno))
if self.PcdSize == '' : if self.PcdSize == '' :
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid PCD format(Name: %s File: %s Line: %s), no PcdSize specified!" %(self.PcdCName, self.FileName, self.Lineno)) "Invalid PCD format(Name: %s File: %s Line: %s), no PcdSize specified!" % (self.PcdCName, self.FileName, self.Lineno))
self._GenOffsetValue () self._GenOffsetValue ()
## Analyze the string value to judge the PCD's datum type euqal to Boolean or not. ## Analyze the string value to judge the PCD's datum type euqal to Boolean or not.
# #
# @param ValueString PCD's value # @param ValueString PCD's value
@ -74,10 +74,10 @@ class PcdEntry:
if ValueString.upper() in ["TRUE", "FALSE"]: if ValueString.upper() in ["TRUE", "FALSE"]:
return True return True
elif ValueString in ["0", "1", "0x0", "0x1", "0x00", "0x01"]: elif ValueString in ["0", "1", "0x0", "0x1", "0x00", "0x01"]:
return True return True
return False return False
## Convert the PCD's value from string to integer. ## Convert the PCD's value from string to integer.
# #
# This function will try to convert the Offset value form string to integer # This function will try to convert the Offset value form string to integer
@ -91,9 +91,9 @@ class PcdEntry:
try: try:
self.PcdBinOffset = int(self.PcdOffset, 16) self.PcdBinOffset = int(self.PcdOffset, 16)
except: except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid offset value %s for PCD %s (File: %s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno)) "Invalid offset value %s for PCD %s (File: %s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno))
## Pack Boolean type VPD PCD's value form string to binary type. ## Pack Boolean type VPD PCD's value form string to binary type.
# #
# @param ValueString The boolean type string for pack. # @param ValueString The boolean type string for pack.
@ -101,18 +101,18 @@ class PcdEntry:
# #
def _PackBooleanValue(self, ValueString): def _PackBooleanValue(self, ValueString):
if ValueString.upper() == "TRUE" or ValueString in ["1", "0x1", "0x01"]: if ValueString.upper() == "TRUE" or ValueString in ["1", "0x1", "0x01"]:
try: try:
self.PcdValue = pack(_FORMAT_CHAR[1], 1) self.PcdValue = pack(_FORMAT_CHAR[1], 1)
except: except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno)) "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
else: else:
try: try:
self.PcdValue = pack(_FORMAT_CHAR[1], 0) self.PcdValue = pack(_FORMAT_CHAR[1], 0)
except: except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno)) "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
## Pack Integer type VPD PCD's value form string to binary type. ## Pack Integer type VPD PCD's value form string to binary type.
# #
# @param ValueString The Integer type string for pack. # @param ValueString The Integer type string for pack.
@ -120,46 +120,46 @@ class PcdEntry:
# #
def _PackIntValue(self, IntValue, Size): def _PackIntValue(self, IntValue, Size):
if Size not in _FORMAT_CHAR.keys(): if Size not in _FORMAT_CHAR.keys():
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno)) "Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))
if Size == 1: if Size == 1:
if IntValue < 0: if IntValue < 0:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"PCD can't be set to negative value %d for PCD %s in UINT8 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno)) "PCD can't be set to negative value %d for PCD %s in UINT8 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
elif IntValue >= 0x100: elif IntValue >= 0x100:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Too large PCD value %d for datum type UINT8 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno)) "Too large PCD value %d for datum type UINT8 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
elif Size == 2: elif Size == 2:
if IntValue < 0: if IntValue < 0:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"PCD can't be set to negative value %d for PCD %s in UINT16 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno)) "PCD can't be set to negative value %d for PCD %s in UINT16 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
elif IntValue >= 0x10000: elif IntValue >= 0x10000:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Too large PCD value %d for datum type UINT16 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno)) "Too large PCD value %d for datum type UINT16 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
elif Size == 4: elif Size == 4:
if IntValue < 0: if IntValue < 0:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"PCD can't be set to negative value %d for PCD %s in UINT32 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno)) "PCD can't be set to negative value %d for PCD %s in UINT32 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
elif IntValue >= 0x100000000: elif IntValue >= 0x100000000:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Too large PCD value %d for datum type UINT32 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno)) "Too large PCD value %d for datum type UINT32 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
elif Size == 8: elif Size == 8:
if IntValue < 0: if IntValue < 0:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"PCD can't be set to negative value %d for PCD %s in UINT32 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno)) "PCD can't be set to negative value %d for PCD %s in UINT32 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
elif IntValue >= 0x10000000000000000: elif IntValue >= 0x10000000000000000:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Too large PCD value %d for datum type UINT32 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno)) "Too large PCD value %d for datum type UINT32 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
else: else:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno)) "Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))
try: try:
self.PcdValue = pack(_FORMAT_CHAR[Size], IntValue) self.PcdValue = pack(_FORMAT_CHAR[Size], IntValue)
except: except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno)) "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
## Pack VOID* type VPD PCD's value form string to binary type. ## Pack VOID* type VPD PCD's value form string to binary type.
# #
@ -178,53 +178,53 @@ class PcdEntry:
elif ValueString.startswith('"') and ValueString.endswith('"'): elif ValueString.startswith('"') and ValueString.endswith('"'):
self._PackString(ValueString, Size) self._PackString(ValueString, Size)
else: else:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid VOID* type PCD %s value %s (File: %s Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno)) "Invalid VOID* type PCD %s value %s (File: %s Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno))
## Pack an Ascii PCD value. ## Pack an Ascii PCD value.
# #
# An Ascii string for a PCD should be in format as "". # An Ascii string for a PCD should be in format as "".
# #
def _PackString(self, ValueString, Size): def _PackString(self, ValueString, Size):
if (Size < 0): if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno)) "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
if (ValueString == ""): if (ValueString == ""):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno)) EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno))
if (len(ValueString) < 2): if (len(ValueString) < 2):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD: %s ,ASCII string %s at least contains two!(File: %s Line: %s)" % (self.PcdCName, self.PcdUnpackValue, self.FileName, self.Lineno)) EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD: %s ,ASCII string %s at least contains two!(File: %s Line: %s)" % (self.PcdCName, self.PcdUnpackValue, self.FileName, self.Lineno))
ValueString = ValueString[1:-1] ValueString = ValueString[1:-1]
if len(ValueString) + 1 > Size: if len(ValueString) + 1 > Size:
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW, EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno)) "PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
try: try:
self.PcdValue= pack('%ds' % Size, ValueString) self.PcdValue = pack('%ds' % Size, ValueString)
except: except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno)) "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
## Pack a byte-array PCD value. ## Pack a byte-array PCD value.
# #
# A byte-array for a PCD should be in format as {0x01, 0x02, ...}. # A byte-array for a PCD should be in format as {0x01, 0x02, ...}.
# #
def _PackByteArray(self, ValueString, Size): def _PackByteArray(self, ValueString, Size):
if (Size < 0): if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno)) EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
if (ValueString == ""): if (ValueString == ""):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno)) EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno))
ValueString = ValueString.strip() ValueString = ValueString.strip()
ValueString = ValueString.lstrip('{').strip('}') ValueString = ValueString.lstrip('{').strip('}')
ValueList = ValueString.split(',') ValueList = ValueString.split(',')
ValueList = [item.strip() for item in ValueList] ValueList = [item.strip() for item in ValueList]
if len(ValueList) > Size: if len(ValueList) > Size:
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW, EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"The byte array %s is too large for size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno)) "The byte array %s is too large for size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
ReturnArray = array.array('B') ReturnArray = array.array('B')
for Index in xrange(len(ValueList)): for Index in xrange(len(ValueList)):
Value = None Value = None
if ValueList[Index].lower().startswith('0x'): if ValueList[Index].lower().startswith('0x'):
@ -232,7 +232,7 @@ class PcdEntry:
try: try:
Value = int(ValueList[Index], 16) Value = int(ValueList[Index], 16)
except: except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s is an invalid HEX value.(File: %s Line: %s)" % \ "The value item %s in byte array %s is an invalid HEX value.(File: %s Line: %s)" % \
(ValueList[Index], ValueString, self.FileName, self.Lineno)) (ValueList[Index], ValueString, self.FileName, self.Lineno))
else: else:
@ -243,52 +243,52 @@ class PcdEntry:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s is an invalid DECIMAL value.(File: %s Line: %s)" % \ "The value item %s in byte array %s is an invalid DECIMAL value.(File: %s Line: %s)" % \
(ValueList[Index], ValueString, self.FileName, self.Lineno)) (ValueList[Index], ValueString, self.FileName, self.Lineno))
if Value > 255: if Value > 255:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s do not in range 0 ~ 0xFF(File: %s Line: %s)" %\ "The value item %s in byte array %s do not in range 0 ~ 0xFF(File: %s Line: %s)" % \
(ValueList[Index], ValueString, self.FileName, self.Lineno)) (ValueList[Index], ValueString, self.FileName, self.Lineno))
ReturnArray.append(Value) ReturnArray.append(Value)
for Index in xrange(len(ValueList), Size): for Index in xrange(len(ValueList), Size):
ReturnArray.append(0) ReturnArray.append(0)
self.PcdValue = ReturnArray.tolist() self.PcdValue = ReturnArray.tolist()
## Pack a unicode PCD value into byte array. ## Pack a unicode PCD value into byte array.
# #
# A unicode string for a PCD should be in format as L"". # A unicode string for a PCD should be in format as L"".
# #
def _PackUnicode(self, UnicodeString, Size): def _PackUnicode(self, UnicodeString, Size):
if (Size < 0): if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" %\ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % \
(self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno)) (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
if (len(UnicodeString) < 3): if (len(UnicodeString) < 3):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD: %s ,ASCII string %s at least contains two!(File: %s Line: %s)" %\ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD: %s ,ASCII string %s at least contains two!(File: %s Line: %s)" % \
(self.PcdCName, self.PcdUnpackValue, self.FileName, self.Lineno)) (self.PcdCName, self.PcdUnpackValue, self.FileName, self.Lineno))
UnicodeString = UnicodeString[2:-1] UnicodeString = UnicodeString[2:-1]
if (len(UnicodeString) + 1) * 2 > Size: if (len(UnicodeString) + 1) * 2 > Size:
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW, EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"The size of unicode string %s is too larger for size %s(File: %s Line: %s)" % \ "The size of unicode string %s is too larger for size %s(File: %s Line: %s)" % \
(UnicodeString, Size, self.FileName, self.Lineno)) (UnicodeString, Size, self.FileName, self.Lineno))
ReturnArray = array.array('B') ReturnArray = array.array('B')
for Value in UnicodeString: for Value in UnicodeString:
try: try:
ReturnArray.append(ord(Value)) ReturnArray.append(ord(Value))
ReturnArray.append(0) ReturnArray.append(0)
except: except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid unicode character %s in unicode string %s(File: %s Line: %s)" % \ "Invalid unicode character %s in unicode string %s(File: %s Line: %s)" % \
(Value, UnicodeString, self.FileName, self.Lineno)) (Value, UnicodeString, self.FileName, self.Lineno))
for Index in xrange(len(UnicodeString) * 2, Size): for Index in xrange(len(UnicodeString) * 2, Size):
ReturnArray.append(0) ReturnArray.append(0)
self.PcdValue = ReturnArray.tolist() self.PcdValue = ReturnArray.tolist()
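
The three _Pack* helpers in this region all reduce to the same idea: render the PCD value into exactly Size bytes and zero-pad the remainder. A condensed, self-contained sketch of the string, byte-array, and unicode encodings (hypothetical helper names, error handling omitted; not the BaseTools functions themselves):

    import array
    from struct import pack

    def PackAscii(Value, Size):                  # Value with quotes already stripped
        return pack('%ds' % Size, Value.encode('ascii'))

    def PackByteArray(Items, Size):              # Items like [0x01, 0x02]
        Buf = array.array('B', Items + [0] * (Size - len(Items)))
        return Buf.tobytes() if hasattr(Buf, 'tobytes') else Buf.tostring()

    def PackUnicode(Value, Size):                # UCS-2 little endian, as in _PackUnicode
        Buf = array.array('B')
        for Ch in Value:                         # ord(Ch) > 0xFF would raise, which the
            Buf.append(ord(Ch))                  # tool reports as an invalid character
            Buf.append(0)
        Buf.extend([0] * (Size - 2 * len(Value)))
        return Buf.tobytes() if hasattr(Buf, 'tobytes') else Buf.tostring()

    print(repr(PackAscii('VPD', 6)))             # 'VPD' plus three NUL bytes
    print(repr(PackByteArray([1, 2], 4)))        # 01 02 00 00
    print(repr(PackUnicode(u'Hi', 8)))           # 'H' 00 'i' 00 then four NUL bytes
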
@ -300,7 +300,7 @@ class PcdEntry:
# 3. Fixed offset if needed; # 3. Fixed offset if needed;
# 4. Generate output file, including guided.map and guided.bin file; # 4. Generate output file, including guided.map and guided.bin file;
# #
class GenVPD : class GenVPD :
## Constructor of DscBuildData ## Constructor of DscBuildData
# #
# Initialize object of GenVPD # Initialize object of GenVPD
@ -322,47 +322,47 @@ class GenVPD :
try: try:
self.FileLinesList = fInputfile.readlines() self.FileLinesList = fInputfile.readlines()
except: except:
EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE, "File read failed for %s" %InputFileName,None) EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE, "File read failed for %s" % InputFileName, None)
finally: finally:
fInputfile.close() fInputfile.close()
except: except:
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %InputFileName,None) EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % InputFileName, None)
## ##
# Parser the input file which is generated by the build tool. Convert the value of each pcd's # Parser the input file which is generated by the build tool. Convert the value of each pcd's
# from string to it's real format. Also remove the useless line in the input file. # from string to it's real format. Also remove the useless line in the input file.
# #
def ParserInputFile (self): def ParserInputFile (self):
count = 0 count = 0
for line in self.FileLinesList: for line in self.FileLinesList:
# Strip "\r\n" generated by readlines (). # Strip "\r\n" generated by readlines ().
line = line.strip() line = line.strip()
line = line.rstrip(os.linesep) line = line.rstrip(os.linesep)
# Skip the comment line # Skip the comment line
if (not line.startswith("#")) and len(line) > 1 : if (not line.startswith("#")) and len(line) > 1 :
# #
# Enhanced for support "|" character in the string. # Enhanced for support "|" character in the string.
# #
ValueList = ['', '', '', '',''] ValueList = ['', '', '', '','']
ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$') ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')
PtrValue = ValueRe.findall(line) PtrValue = ValueRe.findall(line)
ValueUpdateFlag = False ValueUpdateFlag = False
if len(PtrValue) >= 1: if len(PtrValue) >= 1:
line = re.sub(ValueRe, '', line) line = re.sub(ValueRe, '', line)
ValueUpdateFlag = True ValueUpdateFlag = True
TokenList = line.split('|') TokenList = line.split('|')
ValueList[0:len(TokenList)] = TokenList ValueList[0:len(TokenList)] = TokenList
if ValueUpdateFlag: if ValueUpdateFlag:
ValueList[4] = PtrValue[0] ValueList[4] = PtrValue[0]
self.FileLinesList[count] = ValueList self.FileLinesList[count] = ValueList
# Store the line number # Store the line number
self.FileLinesList[count].append(str(count+1)) self.FileLinesList[count].append(str(count + 1))
elif len(line) <= 1 : elif len(line) <= 1 :
# Set the blank line to "None" # Set the blank line to "None"
self.FileLinesList[count] = None self.FileLinesList[count] = None
@ -370,9 +370,9 @@ class GenVPD :
# Set the comment line to "None" # Set the comment line to "None"
self.FileLinesList[count] = None self.FileLinesList[count] = None
count += 1 count += 1
# The line count contain usage information # The line count contain usage information
count = 0 count = 0
# Delete useless lines # Delete useless lines
while (True) : while (True) :
try : try :
@ -381,18 +381,18 @@ class GenVPD :
else : else :
count += 1 count += 1
except : except :
break break
# #
# After remove the useless line, if there are no data remain in the file line list, # After remove the useless line, if there are no data remain in the file line list,
# Report warning messages to user's. # Report warning messages to user's.
# #
if len(self.FileLinesList) == 0 : if len(self.FileLinesList) == 0 :
EdkLogger.warn('BPDG', BuildToolError.RESOURCE_NOT_AVAILABLE, EdkLogger.warn('BPDG', BuildToolError.RESOURCE_NOT_AVAILABLE,
"There are no VPD type pcds defined in DSC file, Please check it.") "There are no VPD type pcds defined in DSC file, Please check it.")
# Process the pcds one by one base on the pcd's value and size # Process the pcds one by one base on the pcd's value and size
count = 0 count = 0
for line in self.FileLinesList: for line in self.FileLinesList:
if line != None : if line != None :
PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4],line[5], self.InputFileName) PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4],line[5], self.InputFileName)
# Strip the space char # Strip the space char
@ -421,7 +421,7 @@ class GenVPD :
PCD.PcdBinSize = PackSize PCD.PcdBinSize = PackSize
except: except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD size value %s at file: %s line: %s" % (PCD.PcdSize, self.InputFileName, PCD.Lineno)) EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD size value %s at file: %s line: %s" % (PCD.PcdSize, self.InputFileName, PCD.Lineno))
if PCD._IsBoolean(PCD.PcdValue, PCD.PcdSize): if PCD._IsBoolean(PCD.PcdValue, PCD.PcdSize):
PCD._PackBooleanValue(PCD.PcdValue) PCD._PackBooleanValue(PCD.PcdValue)
self.FileLinesList[count] = PCD self.FileLinesList[count] = PCD
@ -431,7 +431,7 @@ class GenVPD :
# Try to translate value to an integer firstly. # Try to translate value to an integer firstly.
# #
IsInteger = True IsInteger = True
PackValue = None PackValue = None
try: try:
PackValue = int(PCD.PcdValue) PackValue = int(PCD.PcdValue)
except: except:
@ -439,23 +439,23 @@ class GenVPD :
PackValue = int(PCD.PcdValue, 16) PackValue = int(PCD.PcdValue, 16)
except: except:
IsInteger = False IsInteger = False
if IsInteger: if IsInteger:
PCD._PackIntValue(PackValue, PackSize) PCD._PackIntValue(PackValue, PackSize)
else: else:
PCD._PackPtrValue(PCD.PcdValue, PackSize) PCD._PackPtrValue(PCD.PcdValue, PackSize)
self.FileLinesList[count] = PCD self.FileLinesList[count] = PCD
count += 1 count += 1
else : else :
continue continue
## ##
# This function used to create a clean list only contain useful information and reorganized to make it # This function used to create a clean list only contain useful information and reorganized to make it
# easy to be sorted # easy to be sorted
# #
def FormatFileLine (self) : def FormatFileLine (self) :
for eachPcd in self.FileLinesList : for eachPcd in self.FileLinesList :
if eachPcd.PcdOffset != '*' : if eachPcd.PcdOffset != '*' :
# Use pcd's Offset value as key, and pcd's Value as value # Use pcd's Offset value as key, and pcd's Value as value
@ -463,43 +463,43 @@ class GenVPD :
else : else :
# Use pcd's CName as key, and pcd's Size as value # Use pcd's CName as key, and pcd's Size as value
self.PcdUnknownOffsetList.append(eachPcd) self.PcdUnknownOffsetList.append(eachPcd)
## ##
# This function is use to fix the offset value which the not specified in the map file. # This function is use to fix the offset value which the not specified in the map file.
# Usually it use the star (meaning any offset) character in the offset field # Usually it use the star (meaning any offset) character in the offset field
# #
def FixVpdOffset (self): def FixVpdOffset (self):
# At first, the offset should start at 0 # At first, the offset should start at 0
# Sort fixed offset list in order to find out where has free spaces for the pcd's offset # Sort fixed offset list in order to find out where has free spaces for the pcd's offset
# value is "*" to insert into. # value is "*" to insert into.
self.PcdFixedOffsetSizeList.sort(lambda x,y: cmp(x.PcdBinOffset, y.PcdBinOffset)) self.PcdFixedOffsetSizeList.sort(lambda x, y: cmp(x.PcdBinOffset, y.PcdBinOffset))
# #
# Sort the un-fixed pcd's offset by it's size. # Sort the un-fixed pcd's offset by it's size.
# #
self.PcdUnknownOffsetList.sort(lambda x,y: cmp(x.PcdBinSize, y.PcdBinSize)) self.PcdUnknownOffsetList.sort(lambda x, y: cmp(x.PcdBinSize, y.PcdBinSize))
# #
# Process all Offset value are "*" # Process all Offset value are "*"
# #
if (len(self.PcdFixedOffsetSizeList) == 0) and (len(self.PcdUnknownOffsetList) != 0) : if (len(self.PcdFixedOffsetSizeList) == 0) and (len(self.PcdUnknownOffsetList) != 0) :
# The offset start from 0 # The offset start from 0
NowOffset = 0 NowOffset = 0
for Pcd in self.PcdUnknownOffsetList : for Pcd in self.PcdUnknownOffsetList :
Pcd.PcdBinOffset = NowOffset Pcd.PcdBinOffset = NowOffset
Pcd.PcdOffset = str(hex(Pcd.PcdBinOffset)) Pcd.PcdOffset = str(hex(Pcd.PcdBinOffset))
NowOffset += Pcd.PcdBinSize NowOffset += Pcd.PcdBinSize
self.PcdFixedOffsetSizeList = self.PcdUnknownOffsetList self.PcdFixedOffsetSizeList = self.PcdUnknownOffsetList
return return
# Check the offset of VPD type pcd's offset start from 0. # Check the offset of VPD type pcd's offset start from 0.
if self.PcdFixedOffsetSizeList[0].PcdBinOffset != 0 : if self.PcdFixedOffsetSizeList[0].PcdBinOffset != 0 :
EdkLogger.warn("BPDG", "The offset of VPD type pcd should start with 0, please check it.", EdkLogger.warn("BPDG", "The offset of VPD type pcd should start with 0, please check it.",
None) None)
# Judge whether the offset in fixed pcd offset list is overlapped or not. # Judge whether the offset in fixed pcd offset list is overlapped or not.
lenOfList = len(self.PcdFixedOffsetSizeList) lenOfList = len(self.PcdFixedOffsetSizeList)
count = 0 count = 0
@ -508,22 +508,22 @@ class GenVPD :
PcdNext = self.PcdFixedOffsetSizeList[count+1] PcdNext = self.PcdFixedOffsetSizeList[count+1]
# Two pcd's offset is same # Two pcd's offset is same
if PcdNow.PcdBinOffset == PcdNext.PcdBinOffset : if PcdNow.PcdBinOffset == PcdNext.PcdBinOffset :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE, EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offset of %s at line: %s is same with %s at line: %s in file %s" %\ "The offset of %s at line: %s is same with %s at line: %s in file %s" % \
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName), (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None) None)
# Overlapped # Overlapped
if PcdNow.PcdBinOffset + PcdNow.PcdBinSize > PcdNext.PcdBinOffset : if PcdNow.PcdBinOffset + PcdNow.PcdBinSize > PcdNext.PcdBinOffset :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE, EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offset of %s at line: %s is overlapped with %s at line: %s in file %s" %\ "The offset of %s at line: %s is overlapped with %s at line: %s in file %s" % \
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName), (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None) None)
# Has free space, raise a warning message # Has free space, raise a warning message
if PcdNow.PcdBinOffset + PcdNow.PcdBinSize < PcdNext.PcdBinOffset : if PcdNow.PcdBinOffset + PcdNow.PcdBinSize < PcdNext.PcdBinOffset :
EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE, EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offsets have free space of between %s at line: %s and %s at line: %s in file %s" %\ "The offsets have free space of between %s at line: %s and %s at line: %s in file %s" % \
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName), (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None) None)
count += 1 count += 1
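
The long hunk above only re-indents continuation lines, but the check it belongs to is worth spelling out: the fixed-offset PCD list is walked pairwise in offset order, and each neighbouring pair is classified as a duplicate offset, an overlap, or a gap of free space. The same walk in isolation, using plain tuples instead of PcdEntry objects (illustrative only):

    # Pairwise walk over (name, offset, size) entries sorted by offset, mirroring the
    # duplicate / overlap / free-space classification in FixVpdOffset.
    def CheckLayout(Entries):
        Entries = sorted(Entries, key=lambda Entry: Entry[1])
        for (NameNow, OffNow, SizeNow), (NameNext, OffNext, _) in zip(Entries, Entries[1:]):
            if OffNow == OffNext:
                print('ERROR: %s and %s share offset 0x%X' % (NameNow, NameNext, OffNow))
            elif OffNow + SizeNow > OffNext:
                print('ERROR: %s overlaps %s' % (NameNow, NameNext))
            elif OffNow + SizeNow < OffNext:
                print('WARN:  free space between %s and %s' % (NameNow, NameNext))

    CheckLayout([('PcdA', 0x00, 4), ('PcdB', 0x04, 2), ('PcdC', 0x10, 8)])
    # -> WARN:  free space between PcdB and PcdC
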
@ -545,7 +545,7 @@ class GenVPD :
if LastOffset < NowOffset : if LastOffset < NowOffset :
if lenOfUnfixedList != 0 : if lenOfUnfixedList != 0 :
countOfUnfixedList = 0 countOfUnfixedList = 0
while(countOfUnfixedList < lenOfUnfixedList) : while(countOfUnfixedList < lenOfUnfixedList) :
eachUnfixedPcd = self.PcdUnknownOffsetList[countOfUnfixedList] eachUnfixedPcd = self.PcdUnknownOffsetList[countOfUnfixedList]
needFixPcdSize = eachUnfixedPcd.PcdBinSize needFixPcdSize = eachUnfixedPcd.PcdBinSize
# Not been fixed # Not been fixed
@ -586,8 +586,8 @@ class GenVPD :
FixOffsetSizeListCount += 1 FixOffsetSizeListCount += 1
# Usually it will not enter into this thunk, if so, means it overlapped. # Usually it will not enter into this thunk, if so, means it overlapped.
else : else :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,
"The offset value definition has overlapped at pcd: %s, it's offset is: %s, in file: %s line: %s" %\ "The offset value definition has overlapped at pcd: %s, it's offset is: %s, in file: %s line: %s" % \
(eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset, eachFixedPcd.InputFileName, eachFixedPcd.Lineno), (eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset, eachFixedPcd.InputFileName, eachFixedPcd.Lineno),
None) None)
FixOffsetSizeListCount += 1 FixOffsetSizeListCount += 1
@ -618,46 +618,46 @@ class GenVPD :
#Open an VPD file to process #Open an VPD file to process
try: try:
fVpdFile = open (BinFileName, "wb", 0) fVpdFile = open(BinFileName, "wb", 0)
except: except:
# Open failed # Open failed
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %self.VpdFileName,None) EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)
try : try :
fMapFile = open (MapFileName, "w", 0) fMapFile = open(MapFileName, "w", 0)
except: except:
# Open failed # Open failed
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %self.MapFileName,None) EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)
# Use a instance of StringIO to cache data # Use a instance of StringIO to cache data
fStringIO = StringIO.StringIO('') fStringIO = StringIO.StringIO('')
# Write the header of map file. # Write the header of map file.
try : try :
fMapFile.write (st.MAP_FILE_COMMENT_TEMPLATE + "\n") fMapFile.write (st.MAP_FILE_COMMENT_TEMPLATE + "\n")
except: except:
EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.MapFileName,None) EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)
for eachPcd in self.PcdFixedOffsetSizeList : for eachPcd in self.PcdFixedOffsetSizeList :
# write map file # write map file
try : try :
fMapFile.write("%s | %s | %s | %s | %s \n" % (eachPcd.PcdCName, eachPcd.SkuId,eachPcd.PcdOffset, eachPcd.PcdSize,eachPcd.PcdUnpackValue)) fMapFile.write("%s | %s | %s | %s | %s \n" % (eachPcd.PcdCName, eachPcd.SkuId,eachPcd.PcdOffset, eachPcd.PcdSize,eachPcd.PcdUnpackValue))
except: except:
EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.MapFileName,None) EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)
# Write Vpd binary file # Write Vpd binary file
fStringIO.seek (eachPcd.PcdBinOffset) fStringIO.seek (eachPcd.PcdBinOffset)
if isinstance(eachPcd.PcdValue, list): if isinstance(eachPcd.PcdValue, list):
ValueList = [chr(Item) for Item in eachPcd.PcdValue] ValueList = [chr(Item) for Item in eachPcd.PcdValue]
fStringIO.write(''.join(ValueList)) fStringIO.write(''.join(ValueList))
else: else:
fStringIO.write (eachPcd.PcdValue) fStringIO.write (eachPcd.PcdValue)
try : try :
fVpdFile.write (fStringIO.getvalue()) fVpdFile.write (fStringIO.getvalue())
except: except:
EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.VpdFileName,None) EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.VpdFileName, None)
fStringIO.close () fStringIO.close ()
fVpdFile.close () fVpdFile.close ()
fMapFile.close () fMapFile.close ()
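
The file-generation code above is easier to follow once the buffering trick is isolated: every packed value is written into a single in-memory buffer at its binary offset, and the buffer is flushed to the .bin file in one write, which leaves any gaps between PCDs as zero bytes. A stripped-down sketch of just that part (the tool uses StringIO on Python 2; BytesIO below is the same idea, and the file name is illustrative):

    import io

    def WriteVpdBinary(BinFileName, PcdList):
        Buffer = io.BytesIO()
        for Offset, Value in PcdList:            # Value is already packed to bytes
            Buffer.seek(Offset)
            Buffer.write(Value)
        with open(BinFileName, 'wb') as VpdFile: # single write of the whole image
            VpdFile.write(Buffer.getvalue())

    WriteVpdBinary('Vpd.bin', [(0x00, b'\x01\x00'), (0x08, b'DEMO\x00')])
    # Vpd.bin is 13 bytes long; bytes 2-7 between the two PCDs stay zero.
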
@ -27,19 +27,19 @@ from Common.LongFilePathSupport import OpenLongFilePath as open
# #
def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter): def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
try: try:
F = open(FileName,'r') F = open(FileName, 'r')
Keys = [] Keys = []
for Line in F: for Line in F:
if Line.startswith(CommentCharacter): if Line.startswith(CommentCharacter):
continue continue
LineList = Line.split(KeySplitCharacter,1) LineList = Line.split(KeySplitCharacter, 1)
if len(LineList) >= 2: if len(LineList) >= 2:
Key = LineList[0].split() Key = LineList[0].split()
if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys: if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
if ValueSplitFlag: if ValueSplitFlag:
Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter) Dictionary[Key[0]] = LineList[1].replace('\\', '/').split(ValueSplitCharacter)
else: else:
Dictionary[Key[0]] = LineList[1].strip().replace('\\','/') Dictionary[Key[0]] = LineList[1].strip().replace('\\', '/')
Keys += [Key[0]] Keys += [Key[0]]
F.close() F.close()
return 0 return 0
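
ConvertTextFileToDictionary above is a generic reader for "key = value" text files. A usage sketch, assuming the function shown above is importable (the input file name and contents here are illustrative):

    Lines = ['# comment line\n',
             'ACTIVE_PLATFORM = Nt32Pkg/Nt32Pkg.dsc\n',
             'TARGET          = DEBUG RELEASE\n']
    with open('sample_target.txt', 'w') as F:
        F.writelines(Lines)

    Dictionary = {}
    # '#' marks comments, '=' splits key from value, values are kept as whole strings.
    ConvertTextFileToDictionary('sample_target.txt', Dictionary, '#', '=', False, '')
    print(Dictionary['ACTIVE_PLATFORM'])   # Nt32Pkg/Nt32Pkg.dsc
    print(Dictionary['TARGET'])            # DEBUG RELEASE
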
@ -59,7 +59,7 @@ class EdkIIWorkspace:
# #
# Load TianoCoreOrgLogo, used for GUI tool # Load TianoCoreOrgLogo, used for GUI tool
# #
self.Icon = wx.Icon(self.WorkspaceFile('tools/Python/TianoCoreOrgLogo.gif'),wx.BITMAP_TYPE_GIF) self.Icon = wx.Icon(self.WorkspaceFile('tools/Python/TianoCoreOrgLogo.gif'), wx.BITMAP_TYPE_GIF)
except: except:
self.Icon = None self.Icon = None
@ -151,7 +151,7 @@ class EdkIIWorkspace:
def XmlParseFileSection (self, FileName, SectionTag): def XmlParseFileSection (self, FileName, SectionTag):
if self.Verbose: if self.Verbose:
print FileName print FileName
return XmlParseFileSection (self.WorkspaceFile(FileName), SectionTag) return XmlParseFileSection (self.WorkspaceFile(FileName), SectionTag)
## Save a XML file ## Save a XML file
# #
@ -219,19 +219,19 @@ class EdkIIWorkspace:
# #
def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter): def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
try: try:
F = open(FileName,'r') F = open(FileName, 'r')
except: except:
return False return False
Keys = [] Keys = []
for Line in F: for Line in F:
LineList = Line.split(KeySplitCharacter,1) LineList = Line.split(KeySplitCharacter, 1)
if len(LineList) >= 2: if len(LineList) >= 2:
Key = LineList[0].split() Key = LineList[0].split()
if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys: if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
if ValueSplitFlag: if ValueSplitFlag:
Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter) Dictionary[Key[0]] = LineList[1].replace('\\', '/').split(ValueSplitCharacter)
else: else:
Dictionary[Key[0]] = LineList[1].strip().replace('\\','/') Dictionary[Key[0]] = LineList[1].strip().replace('\\', '/')
Keys += [Key[0]] Keys += [Key[0]]
F.close() F.close()
return True return True
@ -252,7 +252,7 @@ def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplit
# #
def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter): def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
try: try:
F = open(FileName,'r') F = open(FileName, 'r')
Lines = [] Lines = []
Lines = F.readlines() Lines = F.readlines()
F.close() F.close()
@ -265,7 +265,7 @@ def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplit
MaxLength = len(Key) MaxLength = len(Key)
Index = 0 Index = 0
for Line in Lines: for Line in Lines:
LineList = Line.split(KeySplitCharacter,1) LineList = Line.split(KeySplitCharacter, 1)
if len(LineList) >= 2: if len(LineList) >= 2:
Key = LineList[0].split() Key = LineList[0].split()
if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] in Dictionary: if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] in Dictionary:
@ -275,17 +275,17 @@ def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplit
Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, Dictionary[Key[0]]) Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, Dictionary[Key[0]])
Lines.pop(Index) Lines.pop(Index)
if Key[0] in Keys: if Key[0] in Keys:
Lines.insert(Index,Line) Lines.insert(Index, Line)
Keys.remove(Key[0]) Keys.remove(Key[0])
Index += 1 Index += 1
for RemainingKey in Keys: for RemainingKey in Keys:
if ValueSplitFlag: if ValueSplitFlag:
Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter,' '.join(Dictionary[RemainingKey])) Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, ' '.join(Dictionary[RemainingKey]))
else: else:
Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, Dictionary[RemainingKey]) Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, Dictionary[RemainingKey])
Lines.append(Line) Lines.append(Line)
try: try:
F = open(FileName,'w') F = open(FileName, 'w')
except: except:
return False return False
F.writelines(Lines) F.writelines(Lines)
@ -69,8 +69,8 @@ class Warning (Exception):
# @param File The FDF name # @param File The FDF name
# @param Line The Line number that error occurs # @param Line The Line number that error occurs
# #
def __init__(self, Str, File = None, Line = None): def __init__(self, Str, File=None, Line=None):
FileLineTuple = GetRealFileLine(File, Line) FileLineTuple = GetRealFileLine(File, Line)
self.FileName = FileLineTuple[0] self.FileName = FileLineTuple[0]
self.LineNumber = FileLineTuple[1] self.LineNumber = FileLineTuple[1]
@ -359,8 +359,8 @@ class FdfParser(object):
else: else:
raise Warning("Macro not complete At Line ", self.FileName, self.CurrentLineNumber) raise Warning("Macro not complete At Line ", self.FileName, self.CurrentLineNumber)
return Str return Str
def __ReplaceFragment(self, StartPos, EndPos, Value = ' '): def __ReplaceFragment(self, StartPos, EndPos, Value=' '):
if StartPos[0] == EndPos[0]: if StartPos[0] == EndPos[0]:
Offset = StartPos[1] Offset = StartPos[1]
while Offset <= EndPos[1]: while Offset <= EndPos[1]:
@ -423,7 +423,7 @@ def StoreHeader(TextFile, CommonHeader):
Description = CommonHeader.Description Description = CommonHeader.Description
License = CommonHeader.License License = CommonHeader.License
Header = "#/** @file\n#\n" Header = "#/** @file\n#\n"
Header += "# " + Abstract + "\n#\n" Header += "# " + Abstract + "\n#\n"
Header += "# " + Description.strip().replace("\n", "\n# ") + "\n" Header += "# " + Description.strip().replace("\n", "\n# ") + "\n"
Header += "# " + CopyRight + "\n#\n" Header += "# " + CopyRight + "\n#\n"
@ -519,7 +519,7 @@ def GetXmlFileInfo(FileName, TagTuple):
# @retval Options A optparse object containing the parsed options. # @retval Options A optparse object containing the parsed options.
# @retval InputFile Path of an source file to be migrated. # @retval InputFile Path of an source file to be migrated.
# #
def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber = 1.0): def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber=1.0):
# use clearer usage to override default usage message # use clearer usage to override default usage message
UsageString = "%s [-a] [-v|-q] [-o <output_file>] <input_file>" % ToolName UsageString = "%s [-a] [-v|-q] [-o <output_file>] <input_file>" % ToolName
Version = "%s Version %.2f" % (ToolName, VersionNumber) Version = "%s Version %.2f" % (ToolName, VersionNumber)
@ -38,7 +38,7 @@ from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.MultipleWorkspace import MultipleWorkspace as mws from Common.MultipleWorkspace import MultipleWorkspace as mws
## Regular expression used to find out place holders in string template ## Regular expression used to find out place holders in string template
gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE|re.UNICODE) gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE | re.UNICODE)
## Dictionary used to store file time stamp for quick re-access ## Dictionary used to store file time stamp for quick re-access
gFileTimeStampCache = {} # {file path : file time stamp} gFileTimeStampCache = {} # {file path : file time stamp}
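
gPlaceholderPattern is the expression used to locate ${...} placeholders in string templates; the capture group takes everything inside the braces as long as it contains no whitespace, '$', or parentheses. For example (the pattern is copied from the line above, written as a raw string):

    import re

    gPlaceholderPattern = re.compile(r"\$\{([^$()\s]+)\}", re.MULTILINE | re.UNICODE)

    Template = "build -p ${active_platform} -a ${arch} -b ${target}"
    print(gPlaceholderPattern.findall(Template))
    # -> ['active_platform', 'arch', 'target']
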
@ -293,11 +293,11 @@ def ProcessVariableArgument(Option, OptionString, Value, Parser):
def GuidStringToGuidStructureString(Guid): def GuidStringToGuidStructureString(Guid):
GuidList = Guid.split('-') GuidList = Guid.split('-')
Result = '{' Result = '{'
for Index in range(0,3,1): for Index in range(0, 3, 1):
Result = Result + '0x' + GuidList[Index] + ', ' Result = Result + '0x' + GuidList[Index] + ', '
Result = Result + '{0x' + GuidList[3][0:2] + ', 0x' + GuidList[3][2:4] Result = Result + '{0x' + GuidList[3][0:2] + ', 0x' + GuidList[3][2:4]
for Index in range(0,12,2): for Index in range(0, 12, 2):
Result = Result + ', 0x' + GuidList[4][Index:Index+2] Result = Result + ', 0x' + GuidList[4][Index:Index + 2]
Result += '}}' Result += '}}'
return Result return Result
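
GuidStringToGuidStructureString only has its range() arguments re-spaced here, but a concrete input/output pair makes the conversion obvious. Assuming the function above is in scope, and using the EFI global-variable GUID purely as sample input:

    # Registry-format GUID string -> C initializer string, as built by the loops above.
    print(GuidStringToGuidStructureString('8BE4DF61-93CA-11D2-AA0D-00E098032B8C'))
    # -> {0x8BE4DF61, 0x93CA, 0x11D2, {0xAA, 0x0D, 0x00, 0xE0, 0x98, 0x03, 0x2B, 0x8C}}
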
@ -494,7 +494,7 @@ def SaveFileOnChange(File, Content, IsBinaryFile=True):
Fd.write(Content) Fd.write(Content)
Fd.close() Fd.close()
except IOError, X: except IOError, X:
EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s'%X) EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
return True return True
@ -613,7 +613,7 @@ class DirCache:
# #
# @retval A list of all files # @retval A list of all files
# #
def GetFiles(Root, SkipList=None, FullPath = True): def GetFiles(Root, SkipList=None, FullPath=True):
OriPath = Root OriPath = Root
FileList = [] FileList = []
for Root, Dirs, Files in os.walk(Root): for Root, Dirs, Files in os.walk(Root):
@ -663,7 +663,7 @@ def RealPath2(File, Dir='', OverrideDir=''):
if OverrideDir[-1] == os.path.sep: if OverrideDir[-1] == os.path.sep:
return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)] return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)]
else: else:
return NewFile[len(OverrideDir)+1:], NewFile[0:len(OverrideDir)] return NewFile[len(OverrideDir) + 1:], NewFile[0:len(OverrideDir)]
if GlobalData.gAllFiles: if GlobalData.gAllFiles:
NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))] NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]
if not NewFile: if not NewFile:
@ -675,7 +675,7 @@ def RealPath2(File, Dir='', OverrideDir=''):
if Dir[-1] == os.path.sep: if Dir[-1] == os.path.sep:
return NewFile[len(Dir):], NewFile[0:len(Dir)] return NewFile[len(Dir):], NewFile[0:len(Dir)]
else: else:
return NewFile[len(Dir)+1:], NewFile[0:len(Dir)] return NewFile[len(Dir) + 1:], NewFile[0:len(Dir)]
else: else:
return NewFile, '' return NewFile, ''
@ -701,7 +701,7 @@ def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='
# Replace the default dir to current dir # Replace the default dir to current dir
if Dir == '.': if Dir == '.':
Dir = os.getcwd() Dir = os.getcwd()
Dir = Dir[len(Workspace)+1:] Dir = Dir[len(Workspace) + 1:]
# First check if File has Edk definition itself # First check if File has Edk definition itself
if File.find('$(EFI_SOURCE)') > -1 or File.find('$(EDK_SOURCE)') > -1: if File.find('$(EFI_SOURCE)') > -1 or File.find('$(EDK_SOURCE)') > -1:
@ -740,7 +740,7 @@ def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.'
# Dir is current module dir related to workspace # Dir is current module dir related to workspace
if Dir == '.': if Dir == '.':
Dir = os.getcwd() Dir = os.getcwd()
Dir = Dir[len(Workspace)+1:] Dir = Dir[len(Workspace) + 1:]
NewFile = File NewFile = File
RelaPath = AllFiles[os.path.normpath(Dir)] RelaPath = AllFiles[os.path.normpath(Dir)]
@ -865,7 +865,7 @@ class TemplateString(object):
# #
# PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint # PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint
# #
for PlaceHolder,Start,End in PlaceHolderList: for PlaceHolder, Start, End in PlaceHolderList:
self._SubSectionList.append(TemplateSection[SubSectionStart:Start]) self._SubSectionList.append(TemplateSection[SubSectionStart:Start])
self._SubSectionList.append(TemplateSection[Start:End]) self._SubSectionList.append(TemplateSection[Start:End])
self._PlaceHolderList.append(PlaceHolder) self._PlaceHolderList.append(PlaceHolder)
@ -1251,11 +1251,11 @@ class tdict:
if len(key) > 1: if len(key) > 1:
RestKeys = key[1:] RestKeys = key[1:]
elif self._Level_ > 1: elif self._Level_ > 1:
RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
else: else:
FirstKey = key FirstKey = key
if self._Level_ > 1: if self._Level_ > 1:
RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList: if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList:
FirstKey = self._Wildcard FirstKey = self._Wildcard
@ -1328,11 +1328,11 @@ class tdict:
if len(key) > 1: if len(key) > 1:
RestKeys = key[1:] RestKeys = key[1:]
else: else:
RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
else: else:
FirstKey = key FirstKey = key
if self._Level_ > 1: if self._Level_ > 1:
RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
if FirstKey in self._ValidWildcardList: if FirstKey in self._ValidWildcardList:
FirstKey = self._Wildcard FirstKey = self._Wildcard
@ -1437,7 +1437,7 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''):
Pair += 1 Pair += 1
elif ch == ')' and not InStr: elif ch == ')' and not InStr:
Pair -= 1 Pair -= 1
if (Pair > 0 or InStr) and ch == TAB_VALUE_SPLIT: if (Pair > 0 or InStr) and ch == TAB_VALUE_SPLIT:
NewStr += '-' NewStr += '-'
else: else:
@ -1491,7 +1491,7 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''):
IsValid = (len(FieldList) <= 3) IsValid = (len(FieldList) <= 3)
else: else:
IsValid = (len(FieldList) <= 1) IsValid = (len(FieldList) <= 1)
return [Value, Type, Size], IsValid, 0 return [Value, Type, Size], IsValid, 0
elif PcdType in (MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_VPD): elif PcdType in (MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_VPD):
VpdOffset = FieldList[0] VpdOffset = FieldList[0]
Value = Size = '' Value = Size = ''
@ -1532,17 +1532,17 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''):
# #
# @retval ValueList: A List contain value, datum type and toke number. # @retval ValueList: A List contain value, datum type and toke number.
# #
def AnalyzePcdData(Setting): def AnalyzePcdData(Setting):
ValueList = ['', '', ''] ValueList = ['', '', '']
ValueRe = re.compile(r'^\s*L?\".*\|.*\"') ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
PtrValue = ValueRe.findall(Setting) PtrValue = ValueRe.findall(Setting)
ValueUpdateFlag = False ValueUpdateFlag = False
if len(PtrValue) >= 1: if len(PtrValue) >= 1:
Setting = re.sub(ValueRe, '', Setting) Setting = re.sub(ValueRe, '', Setting)
ValueUpdateFlag = True ValueUpdateFlag = True
TokenList = Setting.split(TAB_VALUE_SPLIT) TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList ValueList[0:len(TokenList)] = TokenList
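
AnalyzePcdData and AnalyzeVpdPcdData both start with the same trick: a quoted value that itself contains '|' is lifted out with a regular expression before the field split, so splitting on TAB_VALUE_SPLIT ('|') only sees the remaining fields. The trick in isolation (the restore into field 0 is illustrative; the real functions put the protected value back into whichever field it came from):

    import re

    def SplitPcdSetting(Setting):
        ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
        PtrValue = ValueRe.findall(Setting)      # quoted leading value containing '|'
        if PtrValue:
            Setting = re.sub(ValueRe, '', Setting)
        Fields = [Field.strip() for Field in Setting.split('|')]
        if PtrValue:
            Fields[0] = PtrValue[0].strip()      # restore the protected value
        return Fields

    print(SplitPcdSetting('L"A|B"|VOID*|6'))
    # -> ['L"A|B"', 'VOID*', '6']
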
@ -1578,17 +1578,17 @@ def AnalyzeHiiPcdData(Setting):
# #
# @retval ValueList: A List contain VpdOffset, MaxDatumSize and InitialValue. # @retval ValueList: A List contain VpdOffset, MaxDatumSize and InitialValue.
# #
def AnalyzeVpdPcdData(Setting): def AnalyzeVpdPcdData(Setting):
ValueList = ['', '', ''] ValueList = ['', '', '']
ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$') ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')
PtrValue = ValueRe.findall(Setting) PtrValue = ValueRe.findall(Setting)
ValueUpdateFlag = False ValueUpdateFlag = False
if len(PtrValue) >= 1: if len(PtrValue) >= 1:
Setting = re.sub(ValueRe, '', Setting) Setting = re.sub(ValueRe, '', Setting)
ValueUpdateFlag = True ValueUpdateFlag = True
TokenList = Setting.split(TAB_VALUE_SPLIT) TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList ValueList[0:len(TokenList)] = TokenList
@ -1604,12 +1604,12 @@ def AnalyzeVpdPcdData(Setting):
# #
def CheckPcdDatum(Type, Value): def CheckPcdDatum(Type, Value):
if Type == "VOID*": if Type == "VOID*":
ValueRe = re.compile(r'\s*L?\".*\"\s*$') ValueRe = re.compile(r'\s*L?\".*\"\s*$')
if not (((Value.startswith('L"') or Value.startswith('"')) and Value.endswith('"')) if not (((Value.startswith('L"') or Value.startswith('"')) and Value.endswith('"'))
or (Value.startswith('{') and Value.endswith('}')) or (Value.startswith('{') and Value.endswith('}'))
): ):
return False, "Invalid value [%s] of type [%s]; must be in the form of {...} for array"\ return False, "Invalid value [%s] of type [%s]; must be in the form of {...} for array"\
", or \"...\" for string, or L\"...\" for unicode string" % (Value, Type) ", or \"...\" for string, or L\"...\" for unicode string" % (Value, Type)
elif ValueRe.match(Value): elif ValueRe.match(Value):
# Check the chars in UnicodeString or CString is printable # Check the chars in UnicodeString or CString is printable
if Value.startswith("L"): if Value.startswith("L"):
@ -1662,7 +1662,7 @@ def SplitOption(OptionString):
if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]: if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]:
if Index > OptionStart: if Index > OptionStart:
OptionList.append(OptionString[OptionStart:Index-1]) OptionList.append(OptionString[OptionStart:Index - 1])
OptionStart = Index OptionStart = Index
LastChar = CurrentChar LastChar = CurrentChar
OptionList.append(OptionString[OptionStart:]) OptionList.append(OptionString[OptionStart:])
@ -1739,7 +1739,7 @@ class PathClass(object):
if self.Root[-1] == os.path.sep: if self.Root[-1] == os.path.sep:
self.File = self.Path[len(self.Root):] self.File = self.Path[len(self.Root):]
else: else:
self.File = self.Path[len(self.Root)+1:] self.File = self.Path[len(self.Root) + 1:]
else: else:
self.Path = os.path.normpath(self.File) self.Path = os.path.normpath(self.File)
@ -42,7 +42,7 @@ gDefaultToolsDefFile = "tools_def.txt"
# @var MacroDictionary: To store keys and values defined in DEFINE statement # @var MacroDictionary: To store keys and values defined in DEFINE statement
# #
class ToolDefClassObject(object): class ToolDefClassObject(object):
def __init__(self, FileName = None): def __init__(self, FileName=None):
self.ToolsDefTxtDictionary = {} self.ToolsDefTxtDictionary = {}
self.MacroDictionary = {} self.MacroDictionary = {}
for Env in os.environ: for Env in os.environ:
@ -61,7 +61,7 @@ class ToolDefClassObject(object):
FileContent = [] FileContent = []
if os.path.isfile(FileName): if os.path.isfile(FileName):
try: try:
F = open(FileName,'r') F = open(FileName, 'r')
FileContent = F.readlines() FileContent = F.readlines()
except: except:
EdkLogger.error("tools_def.txt parser", FILE_OPEN_FAILURE, ExtraData=FileName) EdkLogger.error("tools_def.txt parser", FILE_OPEN_FAILURE, ExtraData=FileName)
@ -155,7 +155,7 @@ class ToolDefClassObject(object):
self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE].sort() self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE].sort()
KeyList = [TAB_TOD_DEFINES_TARGET, TAB_TOD_DEFINES_TOOL_CHAIN_TAG, TAB_TOD_DEFINES_TARGET_ARCH, TAB_TOD_DEFINES_COMMAND_TYPE] KeyList = [TAB_TOD_DEFINES_TARGET, TAB_TOD_DEFINES_TOOL_CHAIN_TAG, TAB_TOD_DEFINES_TARGET_ARCH, TAB_TOD_DEFINES_COMMAND_TYPE]
for Index in range(3,-1,-1): for Index in range(3, -1, -1):
for Key in dict(self.ToolsDefTxtDictionary): for Key in dict(self.ToolsDefTxtDictionary):
List = Key.split('_') List = Key.split('_')
if List[Index] == '*': if List[Index] == '*':


@ -34,7 +34,7 @@ import Common.DataType
import Common.GlobalData as GlobalData import Common.GlobalData as GlobalData
from Common import EdkLogger from Common import EdkLogger
from Common.String import * from Common.String import *
from Common.Misc import DirCache,PathClass from Common.Misc import DirCache, PathClass
from Common.Misc import SaveFileOnChange from Common.Misc import SaveFileOnChange
from Common.Misc import ClearDuplicatedInf from Common.Misc import ClearDuplicatedInf
from Common.Misc import GuidStructureStringToGuidString from Common.Misc import GuidStructureStringToGuidString
@ -93,7 +93,7 @@ def main():
if 'EDK_SOURCE' in os.environ.keys(): if 'EDK_SOURCE' in os.environ.keys():
GenFdsGlobalVariable.EdkSourceDir = os.path.normcase(os.environ['EDK_SOURCE']) GenFdsGlobalVariable.EdkSourceDir = os.path.normcase(os.environ['EDK_SOURCE'])
if (Options.debug): if (Options.debug):
GenFdsGlobalVariable.VerboseLogger( "Using Workspace:" + Workspace) GenFdsGlobalVariable.VerboseLogger("Using Workspace:" + Workspace)
os.chdir(GenFdsGlobalVariable.WorkSpaceDir) os.chdir(GenFdsGlobalVariable.WorkSpaceDir)
# set multiple workspace # set multiple workspace
@ -106,7 +106,7 @@ def main():
if FdfFilename[0:2] == '..': if FdfFilename[0:2] == '..':
FdfFilename = os.path.realpath(FdfFilename) FdfFilename = os.path.realpath(FdfFilename)
if not os.path.isabs (FdfFilename): if not os.path.isabs(FdfFilename):
FdfFilename = mws.join(GenFdsGlobalVariable.WorkSpaceDir, FdfFilename) FdfFilename = mws.join(GenFdsGlobalVariable.WorkSpaceDir, FdfFilename)
if not os.path.exists(FdfFilename): if not os.path.exists(FdfFilename):
EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=FdfFilename) EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=FdfFilename)
@ -287,7 +287,7 @@ def main():
GenFds.DisplayFvSpaceInfo(FdfParserObj) GenFds.DisplayFvSpaceInfo(FdfParserObj)
except FdfParser.Warning, X: except FdfParser.Warning, X:
EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError = False) EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
ReturnCode = FORMAT_INVALID ReturnCode = FORMAT_INVALID
except FatalError, X: except FatalError, X:
if Options.debug != None: if Options.debug != None:
@ -326,7 +326,7 @@ def SingleCheckCallback(option, opt_str, value, parser):
# #
def myOptionParser(): def myOptionParser():
usage = "%prog [options] -f input_file -a arch_list -b build_target -p active_platform -t tool_chain_tag -D \"MacroName [= MacroValue]\"" usage = "%prog [options] -f input_file -a arch_list -b build_target -p active_platform -t tool_chain_tag -D \"MacroName [= MacroValue]\""
Parser = OptionParser(usage=usage,description=__copyright__,version="%prog " + str(versionNumber)) Parser = OptionParser(usage=usage, description=__copyright__, version="%prog " + str(versionNumber))
Parser.add_option("-f", "--file", dest="filename", type="string", help="Name of FDF file to convert", action="callback", callback=SingleCheckCallback) Parser.add_option("-f", "--file", dest="filename", type="string", help="Name of FDF file to convert", action="callback", callback=SingleCheckCallback)
Parser.add_option("-a", "--arch", dest="archList", help="comma separated list containing one or more of: IA32, X64, IPF, ARM, AARCH64 or EBC which should be built, overrides target.txt?s TARGET_ARCH") Parser.add_option("-a", "--arch", dest="archList", help="comma separated list containing one or more of: IA32, X64, IPF, ARM, AARCH64 or EBC which should be built, overrides target.txt?s TARGET_ARCH")
Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.") Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
@ -503,8 +503,8 @@ class GenFds :
if UsedSizeValue == TotalSizeValue: if UsedSizeValue == TotalSizeValue:
Percentage = '100' Percentage = '100'
else: else:
Percentage = str((UsedSizeValue+0.0)/TotalSizeValue)[0:4].lstrip('0.') Percentage = str((UsedSizeValue + 0.0) / TotalSizeValue)[0:4].lstrip('0.')
GenFdsGlobalVariable.InfLogger(Name + ' ' + '[' + Percentage + '%Full] ' + str(TotalSizeValue) + ' total, ' + str(UsedSizeValue) + ' used, ' + str(FreeSizeValue) + ' free') GenFdsGlobalVariable.InfLogger(Name + ' ' + '[' + Percentage + '%Full] ' + str(TotalSizeValue) + ' total, ' + str(UsedSizeValue) + ' used, ' + str(FreeSizeValue) + ' free')
## PreprocessImage() ## PreprocessImage()


@ -274,7 +274,7 @@ class GenFdsGlobalVariable:
# @param ArchList The Arch list of platform # @param ArchList The Arch list of platform
# #
def SetDir (OutputDir, FdfParser, WorkSpace, ArchList): def SetDir (OutputDir, FdfParser, WorkSpace, ArchList):
GenFdsGlobalVariable.VerboseLogger( "GenFdsGlobalVariable.OutputDir :%s" %OutputDir) GenFdsGlobalVariable.VerboseLogger("GenFdsGlobalVariable.OutputDir :%s" % OutputDir)
# GenFdsGlobalVariable.OutputDirDict = OutputDir # GenFdsGlobalVariable.OutputDirDict = OutputDir
GenFdsGlobalVariable.FdfParser = FdfParser GenFdsGlobalVariable.FdfParser = FdfParser
GenFdsGlobalVariable.WorkSpace = WorkSpace GenFdsGlobalVariable.WorkSpace = WorkSpace
@ -292,7 +292,7 @@ class GenFdsGlobalVariable:
# Create FV Address inf file # Create FV Address inf file
# #
GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, 'FvAddress.inf') GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, 'FvAddress.inf')
FvAddressFile = open (GenFdsGlobalVariable.FvAddressFileName, 'w') FvAddressFile = open(GenFdsGlobalVariable.FvAddressFileName, 'w')
# #
# Add [Options] # Add [Options]
# #
@ -304,7 +304,7 @@ class GenFdsGlobalVariable:
break break
FvAddressFile.writelines("EFI_BOOT_DRIVER_BASE_ADDRESS = " + \ FvAddressFile.writelines("EFI_BOOT_DRIVER_BASE_ADDRESS = " + \
BsAddress + \ BsAddress + \
T_CHAR_LF) T_CHAR_LF)
RtAddress = '0' RtAddress = '0'
@ -313,7 +313,7 @@ class GenFdsGlobalVariable:
RtAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].RtBaseAddress RtAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].RtBaseAddress
FvAddressFile.writelines("EFI_RUNTIME_DRIVER_BASE_ADDRESS = " + \ FvAddressFile.writelines("EFI_RUNTIME_DRIVER_BASE_ADDRESS = " + \
RtAddress + \ RtAddress + \
T_CHAR_LF) T_CHAR_LF)
FvAddressFile.close() FvAddressFile.close()
@ -386,13 +386,13 @@ class GenFdsGlobalVariable:
CommandFile = Output + '.txt' CommandFile = Output + '.txt'
if Ui not in [None, '']: if Ui not in [None, '']:
#Cmd += ["-n", '"' + Ui + '"'] #Cmd += ["-n", '"' + Ui + '"']
SectionData = array.array('B', [0,0,0,0]) SectionData = array.array('B', [0, 0, 0, 0])
SectionData.fromstring(Ui.encode("utf_16_le")) SectionData.fromstring(Ui.encode("utf_16_le"))
SectionData.append(0) SectionData.append(0)
SectionData.append(0) SectionData.append(0)
Len = len(SectionData) Len = len(SectionData)
GenFdsGlobalVariable.SectionHeader.pack_into(SectionData, 0, Len & 0xff, (Len >> 8) & 0xff, (Len >> 16) & 0xff, 0x15) GenFdsGlobalVariable.SectionHeader.pack_into(SectionData, 0, Len & 0xff, (Len >> 8) & 0xff, (Len >> 16) & 0xff, 0x15)
SaveFileOnChange(Output, SectionData.tostring()) SaveFileOnChange(Output, SectionData.tostring())
elif Ver not in [None, '']: elif Ver not in [None, '']:
Cmd += ["-n", Ver] Cmd += ["-n", Ver]
if BuildNumber: if BuildNumber:
@ -461,12 +461,12 @@ class GenFdsGlobalVariable:
Cmd = ["GenFv"] Cmd = ["GenFv"]
if BaseAddress not in [None, '']: if BaseAddress not in [None, '']:
Cmd += ["-r", BaseAddress] Cmd += ["-r", BaseAddress]
if ForceRebase == False: if ForceRebase == False:
Cmd +=["-F", "FALSE"] Cmd += ["-F", "FALSE"]
elif ForceRebase == True: elif ForceRebase == True:
Cmd +=["-F", "TRUE"] Cmd += ["-F", "TRUE"]
if Capsule: if Capsule:
Cmd += ["-c"] Cmd += ["-c"]
if Dump: if Dump:
@ -570,7 +570,7 @@ class GenFdsGlobalVariable:
if VendorId != None: if VendorId != None:
Cmd += ["-f", VendorId] Cmd += ["-f", VendorId]
Cmd += ["-o", Output] Cmd += ["-o", Output]
GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate option rom") GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate option rom")
@staticmethod @staticmethod
@ -606,7 +606,7 @@ class GenFdsGlobalVariable:
sys.stdout.write('\n') sys.stdout.write('\n')
try: try:
PopenObject = subprocess.Popen(' '.join(cmd), stdout=subprocess.PIPE, stderr= subprocess.PIPE, shell=True) PopenObject = subprocess.Popen(' '.join(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
except Exception, X: except Exception, X:
EdkLogger.error("GenFds", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0])) EdkLogger.error("GenFds", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0]))
(out, error) = PopenObject.communicate() (out, error) = PopenObject.communicate()
@ -618,7 +618,7 @@ class GenFdsGlobalVariable:
returnValue[0] = PopenObject.returncode returnValue[0] = PopenObject.returncode
return return
if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1: if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1:
GenFdsGlobalVariable.InfLogger ("Return Value = %d" %PopenObject.returncode) GenFdsGlobalVariable.InfLogger ("Return Value = %d" % PopenObject.returncode)
GenFdsGlobalVariable.InfLogger (out) GenFdsGlobalVariable.InfLogger (out)
GenFdsGlobalVariable.InfLogger (error) GenFdsGlobalVariable.InfLogger (error)
if PopenObject.returncode != 0: if PopenObject.returncode != 0:
@ -631,7 +631,7 @@ class GenFdsGlobalVariable:
def InfLogger (msg): def InfLogger (msg):
EdkLogger.info(msg) EdkLogger.info(msg)
def ErrorLogger (msg, File = None, Line = None, ExtraData = None): def ErrorLogger (msg, File=None, Line=None, ExtraData=None):
EdkLogger.error('GenFds', GENFDS_ERROR, msg, File, Line, ExtraData) EdkLogger.error('GenFds', GENFDS_ERROR, msg, File, Line, ExtraData)
def DebugLogger (Level, msg): def DebugLogger (Level, msg):
@ -642,7 +642,7 @@ class GenFdsGlobalVariable:
# @param Str String that may contain macro # @param Str String that may contain macro
# @param MacroDict Dictionary that contains macro value pair # @param MacroDict Dictionary that contains macro value pair
# #
def MacroExtend (Str, MacroDict = {}, Arch = 'COMMON'): def MacroExtend (Str, MacroDict={}, Arch='COMMON'):
if Str == None : if Str == None :
return None return None
@ -699,10 +699,10 @@ class GenFdsGlobalVariable:
PcdValue = PcdObj.DefaultValue PcdValue = PcdObj.DefaultValue
return PcdValue return PcdValue
for Package in GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform, for Package in GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform,
Arch, Arch,
GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.TargetName,
GenFdsGlobalVariable.ToolChainTag): GenFdsGlobalVariable.ToolChainTag):
PcdDict = Package.Pcds PcdDict = Package.Pcds
for Key in PcdDict: for Key in PcdDict:


@ -53,7 +53,7 @@ class GuidSection(GuidSectionClassObject) :
# @param Dict dictionary contains macro and its value # @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name, section alignment) # @retval tuple (Generated file name, section alignment)
# #
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}): def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict={}):
# #
# Generate all section # Generate all section
# #
@ -65,7 +65,7 @@ class GuidSection(GuidSectionClassObject) :
self.SectionType = FfsInf.__ExtendMacro__(self.SectionType) self.SectionType = FfsInf.__ExtendMacro__(self.SectionType)
self.CurrentArchList = [FfsInf.CurrentArch] self.CurrentArchList = [FfsInf.CurrentArch]
SectFile = tuple() SectFile = tuple()
SectAlign = [] SectAlign = []
Index = 0 Index = 0
MaxAlign = None MaxAlign = None
@ -84,7 +84,7 @@ class GuidSection(GuidSectionClassObject) :
for Sect in self.SectionList: for Sect in self.SectionList:
Index = Index + 1 Index = Index + 1
SecIndex = '%s.%d' %(SecNum,Index) SecIndex = '%s.%d' % (SecNum, Index)
# set base address for inside FvImage # set base address for inside FvImage
if isinstance(Sect, FvImageSection): if isinstance(Sect, FvImageSection):
if self.FvAddr != []: if self.FvAddr != []:
@ -93,7 +93,7 @@ class GuidSection(GuidSectionClassObject) :
elif isinstance(Sect, GuidSection): elif isinstance(Sect, GuidSection):
Sect.FvAddr = self.FvAddr Sect.FvAddr = self.FvAddr
Sect.FvParentAddr = self.FvParentAddr Sect.FvParentAddr = self.FvParentAddr
ReturnSectList, align = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList,FfsInf, Dict) ReturnSectList, align = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList, FfsInf, Dict)
if isinstance(Sect, GuidSection): if isinstance(Sect, GuidSection):
if Sect.IncludeFvSection: if Sect.IncludeFvSection:
self.IncludeFvSection = Sect.IncludeFvSection self.IncludeFvSection = Sect.IncludeFvSection
@ -118,10 +118,10 @@ class GuidSection(GuidSectionClassObject) :
self.Alignment = MaxAlign self.Alignment = MaxAlign
OutputFile = OutputPath + \ OutputFile = OutputPath + \
os.sep + \ os.sep + \
ModuleName + \ ModuleName + \
'SEC' + \ 'SEC' + \
SecNum + \ SecNum + \
Ffs.SectionSuffix['GUIDED'] Ffs.SectionSuffix['GUIDED']
OutputFile = os.path.normpath(OutputFile) OutputFile = os.path.normpath(OutputFile)
@ -135,7 +135,7 @@ class GuidSection(GuidSectionClassObject) :
# GENCRC32 section # GENCRC32 section
# #
if self.NameGuid == None : if self.NameGuid == None :
GenFdsGlobalVariable.VerboseLogger( "Use GenSection function Generate CRC32 Section") GenFdsGlobalVariable.VerboseLogger("Use GenSection function Generate CRC32 Section")
GenFdsGlobalVariable.GenerateSection(OutputFile, SectFile, Section.Section.SectionType[self.SectionType], InputAlign=SectAlign) GenFdsGlobalVariable.GenerateSection(OutputFile, SectFile, Section.Section.SectionType[self.SectionType], InputAlign=SectAlign)
OutputFileList = [] OutputFileList = []
OutputFileList.append(OutputFile) OutputFileList.append(OutputFile)
@ -144,7 +144,7 @@ class GuidSection(GuidSectionClassObject) :
elif ExternalTool == None: elif ExternalTool == None:
EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % self.NameGuid) EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % self.NameGuid)
else: else:
DummyFile = OutputFile+".dummy" DummyFile = OutputFile + ".dummy"
# #
# Call GenSection with DUMMY section type. # Call GenSection with DUMMY section type.
# #
@ -153,10 +153,10 @@ class GuidSection(GuidSectionClassObject) :
# Use external tool process the Output # Use external tool process the Output
# #
TempFile = OutputPath + \ TempFile = OutputPath + \
os.sep + \ os.sep + \
ModuleName + \ ModuleName + \
'SEC' + \ 'SEC' + \
SecNum + \ SecNum + \
'.tmp' '.tmp'
TempFile = os.path.normpath(TempFile) TempFile = os.path.normpath(TempFile)
# #
@ -197,12 +197,12 @@ class GuidSection(GuidSectionClassObject) :
if not os.path.exists(TempFile): if not os.path.exists(TempFile):
EdkLogger.error("GenFds", COMMAND_FAILURE, 'Fail to call %s, no output file was generated' % ExternalTool) EdkLogger.error("GenFds", COMMAND_FAILURE, 'Fail to call %s, no output file was generated' % ExternalTool)
FileHandleIn = open(DummyFile,'rb') FileHandleIn = open(DummyFile, 'rb')
FileHandleIn.seek(0,2) FileHandleIn.seek(0, 2)
InputFileSize = FileHandleIn.tell() InputFileSize = FileHandleIn.tell()
FileHandleOut = open(TempFile,'rb') FileHandleOut = open(TempFile, 'rb')
FileHandleOut.seek(0,2) FileHandleOut.seek(0, 2)
TempFileSize = FileHandleOut.tell() TempFileSize = FileHandleOut.tell()
Attribute = [] Attribute = []
@ -213,7 +213,7 @@ class GuidSection(GuidSectionClassObject) :
if self.ProcessRequired == "NONE" and HeaderLength == None: if self.ProcessRequired == "NONE" and HeaderLength == None:
if TempFileSize > InputFileSize: if TempFileSize > InputFileSize:
FileHandleIn.seek(0) FileHandleIn.seek(0)
BufferIn = FileHandleIn.read() BufferIn = FileHandleIn.read()
FileHandleOut.seek(0) FileHandleOut.seek(0)
BufferOut = FileHandleOut.read() BufferOut = FileHandleOut.read()
if BufferIn == BufferOut[TempFileSize - InputFileSize:]: if BufferIn == BufferOut[TempFileSize - InputFileSize:]:
@ -224,18 +224,18 @@ class GuidSection(GuidSectionClassObject) :
FileHandleIn.close() FileHandleIn.close()
FileHandleOut.close() FileHandleOut.close()
if FirstCall and 'PROCESSING_REQUIRED' in Attribute: if FirstCall and 'PROCESSING_REQUIRED' in Attribute:
# Guided data by -z option on first call is the process required data. Call the guided tool with the real option. # Guided data by -z option on first call is the process required data. Call the guided tool with the real option.
GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile], ExternalTool, CmdOption) GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile], ExternalTool, CmdOption)
# #
# Call Gensection Add Section Header # Call Gensection Add Section Header
# #
if self.ProcessRequired in ("TRUE", "1"): if self.ProcessRequired in ("TRUE", "1"):
if 'PROCESSING_REQUIRED' not in Attribute: if 'PROCESSING_REQUIRED' not in Attribute:
Attribute.append('PROCESSING_REQUIRED') Attribute.append('PROCESSING_REQUIRED')
if self.AuthStatusValid in ("TRUE", "1"): if self.AuthStatusValid in ("TRUE", "1"):
Attribute.append('AUTH_STATUS_VALID') Attribute.append('AUTH_STATUS_VALID')
GenFdsGlobalVariable.GenerateSection(OutputFile, [TempFile], Section.Section.SectionType['GUIDED'], GenFdsGlobalVariable.GenerateSection(OutputFile, [TempFile], Section.Section.SectionType['GUIDED'],
@ -263,7 +263,7 @@ class GuidSection(GuidSectionClassObject) :
ToolDb = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.ConfDir).ToolsDefTxtDatabase ToolDb = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.ConfDir).ToolsDefTxtDatabase
if ToolChain not in ToolDb['TOOL_CHAIN_TAG']: if ToolChain not in ToolDb['TOOL_CHAIN_TAG']:
EdkLogger.error("GenFds", GENFDS_ERROR, "Can not find external tool because tool tag %s is not defined in tools_def.txt!" % ToolChain) EdkLogger.error("GenFds", GENFDS_ERROR, "Can not find external tool because tool tag %s is not defined in tools_def.txt!" % ToolChain)
self.KeyStringList = [Target+'_'+ToolChain+'_'+self.CurrentArchList[0]] self.KeyStringList = [Target + '_' + ToolChain + '_' + self.CurrentArchList[0]]
for Arch in self.CurrentArchList: for Arch in self.CurrentArchList:
if Target + '_' + ToolChain + '_' + Arch not in self.KeyStringList: if Target + '_' + ToolChain + '_' + Arch not in self.KeyStringList:
self.KeyStringList.append(Target + '_' + ToolChain + '_' + Arch) self.KeyStringList.append(Target + '_' + ToolChain + '_' + Arch)
@ -275,30 +275,30 @@ class GuidSection(GuidSectionClassObject) :
if self.NameGuid == ToolDef[1]: if self.NameGuid == ToolDef[1]:
KeyList = ToolDef[0].split('_') KeyList = ToolDef[0].split('_')
Key = KeyList[0] + \ Key = KeyList[0] + \
'_' + \ '_' + \
KeyList[1] + \ KeyList[1] + \
'_' + \ '_' + \
KeyList[2] KeyList[2]
if Key in self.KeyStringList and KeyList[4] == 'GUID': if Key in self.KeyStringList and KeyList[4] == 'GUID':
ToolPath = ToolDefinition.get( Key + \ ToolPath = ToolDefinition.get(Key + \
'_' + \ '_' + \
KeyList[3] + \ KeyList[3] + \
'_' + \ '_' + \
'PATH') 'PATH')
ToolOption = ToolDefinition.get( Key + \ ToolOption = ToolDefinition.get(Key + \
'_' + \ '_' + \
KeyList[3] + \ KeyList[3] + \
'_' + \ '_' + \
'FLAGS') 'FLAGS')
if ToolPathTmp == None: if ToolPathTmp == None:
ToolPathTmp = ToolPath ToolPathTmp = ToolPath
else: else:
if ToolPathTmp != ToolPath: if ToolPathTmp != ToolPath:
EdkLogger.error("GenFds", GENFDS_ERROR, "Don't know which tool to use, %s or %s ?" % (ToolPathTmp, ToolPath)) EdkLogger.error("GenFds", GENFDS_ERROR, "Don't know which tool to use, %s or %s ?" % (ToolPathTmp, ToolPath))
return ToolPathTmp, ToolOption return ToolPathTmp, ToolOption


@ -54,10 +54,10 @@ class Region(RegionClassObject):
# @retval string Generated FV file path # @retval string Generated FV file path
# #
def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, ImageBinDict, vtfDict = None, MacroDict = {}): def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, ImageBinDict, vtfDict=None, MacroDict={}):
Size = self.Size Size = self.Size
GenFdsGlobalVariable.InfLogger('\nGenerate Region at Offset 0x%X' % self.Offset) GenFdsGlobalVariable.InfLogger('\nGenerate Region at Offset 0x%X' % self.Offset)
GenFdsGlobalVariable.InfLogger(" Region Size = 0x%X" %Size) GenFdsGlobalVariable.InfLogger(" Region Size = 0x%X" % Size)
GenFdsGlobalVariable.SharpCounter = 0 GenFdsGlobalVariable.SharpCounter = 0
if self.RegionType == 'FV': if self.RegionType == 'FV':
@ -65,13 +65,13 @@ class Region(RegionClassObject):
# Get Fv from FvDict # Get Fv from FvDict
# #
self.FvAddress = int(BaseAddress, 16) + self.Offset self.FvAddress = int(BaseAddress, 16) + self.Offset
FvBaseAddress = '0x%X' %self.FvAddress FvBaseAddress = '0x%X' % self.FvAddress
FvOffset = 0 FvOffset = 0
for RegionData in self.RegionDataList: for RegionData in self.RegionDataList:
FileName = None FileName = None
if RegionData.endswith(".fv"): if RegionData.endswith(".fv"):
RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict) RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
GenFdsGlobalVariable.InfLogger(' Region FV File Name = .fv : %s'%RegionData) GenFdsGlobalVariable.InfLogger(' Region FV File Name = .fv : %s' % RegionData)
if RegionData[1] != ':' : if RegionData[1] != ':' :
RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData) RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)
if not os.path.exists(RegionData): if not os.path.exists(RegionData):
@ -101,7 +101,7 @@ class Region(RegionClassObject):
EdkLogger.error("GenFds", GENFDS_ERROR, EdkLogger.error("GenFds", GENFDS_ERROR,
"FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment)) "FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
FvBuffer = StringIO.StringIO('') FvBuffer = StringIO.StringIO('')
FvBaseAddress = '0x%X' %self.FvAddress FvBaseAddress = '0x%X' % self.FvAddress
BlockSize = None BlockSize = None
BlockNum = None BlockNum = None
FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, vtfDict) FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, vtfDict)
@ -128,7 +128,7 @@ class Region(RegionClassObject):
EdkLogger.error("GenFds", GENFDS_ERROR, EdkLogger.error("GenFds", GENFDS_ERROR,
"Size of FV File (%s) is larger than Region Size 0x%X specified." \ "Size of FV File (%s) is larger than Region Size 0x%X specified." \
% (RegionData, Size)) % (RegionData, Size))
BinFile = open (FileName, 'r+b') BinFile = open(FileName, 'r+b')
Buffer.write(BinFile.read()) Buffer.write(BinFile.read())
BinFile.close() BinFile.close()
Size = Size - FileLength Size = Size - FileLength
@ -150,7 +150,7 @@ class Region(RegionClassObject):
for RegionData in self.RegionDataList: for RegionData in self.RegionDataList:
if RegionData.endswith(".cap"): if RegionData.endswith(".cap"):
RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict) RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
GenFdsGlobalVariable.InfLogger(' Region CAPSULE Image Name = .cap : %s'%RegionData) GenFdsGlobalVariable.InfLogger(' Region CAPSULE Image Name = .cap : %s' % RegionData)
if RegionData[1] != ':' : if RegionData[1] != ':' :
RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData) RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)
if not os.path.exists(RegionData): if not os.path.exists(RegionData):
@ -187,7 +187,7 @@ class Region(RegionClassObject):
EdkLogger.error("GenFds", GENFDS_ERROR, EdkLogger.error("GenFds", GENFDS_ERROR,
"Size 0x%X of Capsule File (%s) is larger than Region Size 0x%X specified." \ "Size 0x%X of Capsule File (%s) is larger than Region Size 0x%X specified." \
% (FileLength, RegionData, Size)) % (FileLength, RegionData, Size))
BinFile = open (FileName, 'r+b') BinFile = open(FileName, 'r+b')
Buffer.write(BinFile.read()) Buffer.write(BinFile.read())
BinFile.close() BinFile.close()
Size = Size - FileLength Size = Size - FileLength
@ -217,8 +217,8 @@ class Region(RegionClassObject):
EdkLogger.error("GenFds", GENFDS_ERROR, EdkLogger.error("GenFds", GENFDS_ERROR,
"Size of File (%s) is larger than Region Size 0x%X specified." \ "Size of File (%s) is larger than Region Size 0x%X specified." \
% (RegionData, Size)) % (RegionData, Size))
GenFdsGlobalVariable.InfLogger(' Region File Name = %s'%RegionData) GenFdsGlobalVariable.InfLogger(' Region File Name = %s' % RegionData)
BinFile = open (RegionData, 'rb') BinFile = open(RegionData, 'rb')
Buffer.write(BinFile.read()) Buffer.write(BinFile.read())
BinFile.close() BinFile.close()
Size = Size - FileLength Size = Size - FileLength
@ -273,17 +273,17 @@ class Region(RegionClassObject):
Granu = 1024 Granu = 1024
Str = Str[:-1] Str = Str[:-1]
elif Str.endswith('M'): elif Str.endswith('M'):
Granu = 1024*1024 Granu = 1024 * 1024
Str = Str[:-1] Str = Str[:-1]
elif Str.endswith('G'): elif Str.endswith('G'):
Granu = 1024*1024*1024 Granu = 1024 * 1024 * 1024
Str = Str[:-1] Str = Str[:-1]
else: else:
pass pass
AlignValue = int(Str)*Granu AlignValue = int(Str) * Granu
return AlignValue return AlignValue
## BlockSizeOfRegion() ## BlockSizeOfRegion()
# #
# @param BlockSizeList List of block information # @param BlockSizeList List of block information
@ -304,7 +304,7 @@ class Region(RegionClassObject):
else: else:
# region ended within current blocks # region ended within current blocks
if self.Offset + self.Size <= End: if self.Offset + self.Size <= End:
ExpectedList.append((BlockSize, (RemindingSize + BlockSize - 1)/BlockSize)) ExpectedList.append((BlockSize, (RemindingSize + BlockSize - 1) / BlockSize))
break break
# region not ended yet # region not ended yet
else: else:
@ -313,11 +313,11 @@ class Region(RegionClassObject):
UsedBlockNum = BlockNum UsedBlockNum = BlockNum
# region started in middle of current blocks # region started in middle of current blocks
else: else:
UsedBlockNum = (End - self.Offset)/BlockSize UsedBlockNum = (End - self.Offset) / BlockSize
Start = End Start = End
ExpectedList.append((BlockSize, UsedBlockNum)) ExpectedList.append((BlockSize, UsedBlockNum))
RemindingSize -= BlockSize * UsedBlockNum RemindingSize -= BlockSize * UsedBlockNum
if FvObj.BlockSizeList == []: if FvObj.BlockSizeList == []:
FvObj.BlockSizeList = ExpectedList FvObj.BlockSizeList = ExpectedList
else: else:
@ -333,22 +333,22 @@ class Region(RegionClassObject):
Sum += Item[0] * Item[1] Sum += Item[0] * Item[1]
if self.Size < Sum: if self.Size < Sum:
EdkLogger.error("GenFds", GENFDS_ERROR, "Total Size of FV %s 0x%x is larger than Region Size 0x%x " EdkLogger.error("GenFds", GENFDS_ERROR, "Total Size of FV %s 0x%x is larger than Region Size 0x%x "
%(FvObj.UiFvName, Sum, self.Size)) % (FvObj.UiFvName, Sum, self.Size))
# check whether the BlockStatements in FV section is appropriate # check whether the BlockStatements in FV section is appropriate
ExpectedListData = '' ExpectedListData = ''
for Item in ExpectedList: for Item in ExpectedList:
ExpectedListData += "BlockSize = 0x%x\n\tNumBlocks = 0x%x\n\t"%Item ExpectedListData += "BlockSize = 0x%x\n\tNumBlocks = 0x%x\n\t" % Item
Index = 0 Index = 0
for Item in FvObj.BlockSizeList: for Item in FvObj.BlockSizeList:
if Item[0] != ExpectedList[Index][0]: if Item[0] != ExpectedList[Index][0]:
EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not align with FD's, suggested FV BlockStatement" EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not align with FD's, suggested FV BlockStatement"
%FvObj.UiFvName, ExtraData = ExpectedListData) % FvObj.UiFvName, ExtraData=ExpectedListData)
elif Item[1] != ExpectedList[Index][1]: elif Item[1] != ExpectedList[Index][1]:
if (Item[1] < ExpectedList[Index][1]) and (Index == len(FvObj.BlockSizeList) - 1): if (Item[1] < ExpectedList[Index][1]) and (Index == len(FvObj.BlockSizeList) - 1):
break; break;
else: else:
EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not align with FD's, suggested FV BlockStatement" EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not align with FD's, suggested FV BlockStatement"
%FvObj.UiFvName, ExtraData = ExpectedListData) % FvObj.UiFvName, ExtraData=ExpectedListData)
else: else:
Index += 1 Index += 1


@ -48,7 +48,7 @@ class UiSection (UiSectionClassObject):
# @param Dict dictionary contains macro and its value # @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name, section alignment) # @retval tuple (Generated file name, section alignment)
# #
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}): def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict={}):
# #
# Prepare the parameter of GenSection # Prepare the parameter of GenSection
# #


@ -48,7 +48,7 @@ class VerSection (VerSectionClassObject):
# @param Dict dictionary contains macro and its value # @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name, section alignment) # @retval tuple (Generated file name, section alignment)
# #
def GenSection(self,OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}): def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict={}):
# #
# Prepare the parameter of GenSection # Prepare the parameter of GenSection
# #


@ -67,80 +67,80 @@ class Vtf (VtfClassObject):
def GenBsfInf (self): def GenBsfInf (self):
FvList = self.GetFvList() FvList = self.GetFvList()
self.BsfInfName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.inf') self.BsfInfName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.inf')
BsfInf = open (self.BsfInfName, 'w+') BsfInf = open(self.BsfInfName, 'w+')
if self.ResetBin != None: if self.ResetBin != None:
BsfInf.writelines ("[OPTIONS]" + T_CHAR_LF) BsfInf.writelines ("[OPTIONS]" + T_CHAR_LF)
BsfInf.writelines ("IA32_RST_BIN" + \ BsfInf.writelines ("IA32_RST_BIN" + \
" = " + \ " = " + \
GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.ResetBin)) + \ GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.ResetBin)) + \
T_CHAR_LF ) T_CHAR_LF)
BsfInf.writelines (T_CHAR_LF ) BsfInf.writelines (T_CHAR_LF)
BsfInf.writelines ("[COMPONENTS]" + T_CHAR_LF) BsfInf.writelines ("[COMPONENTS]" + T_CHAR_LF)
for ComponentObj in self.ComponentStatementList : for ComponentObj in self.ComponentStatementList :
BsfInf.writelines ("COMP_NAME" + \ BsfInf.writelines ("COMP_NAME" + \
" = " + \ " = " + \
ComponentObj.CompName + \ ComponentObj.CompName + \
T_CHAR_LF ) T_CHAR_LF)
if ComponentObj.CompLoc.upper() == 'NONE': if ComponentObj.CompLoc.upper() == 'NONE':
BsfInf.writelines ("COMP_LOC" + \ BsfInf.writelines ("COMP_LOC" + \
" = " + \ " = " + \
'N' + \ 'N' + \
T_CHAR_LF ) T_CHAR_LF)
elif ComponentObj.FilePos != None: elif ComponentObj.FilePos != None:
BsfInf.writelines ("COMP_LOC" + \ BsfInf.writelines ("COMP_LOC" + \
" = " + \ " = " + \
ComponentObj.FilePos + \ ComponentObj.FilePos + \
T_CHAR_LF ) T_CHAR_LF)
else: else:
Index = FvList.index(ComponentObj.CompLoc.upper()) Index = FvList.index(ComponentObj.CompLoc.upper())
if Index == 0: if Index == 0:
BsfInf.writelines ("COMP_LOC" + \ BsfInf.writelines ("COMP_LOC" + \
" = " + \ " = " + \
'F' + \ 'F' + \
T_CHAR_LF ) T_CHAR_LF)
elif Index == 1: elif Index == 1:
BsfInf.writelines ("COMP_LOC" + \ BsfInf.writelines ("COMP_LOC" + \
" = " + \ " = " + \
'S' + \ 'S' + \
T_CHAR_LF ) T_CHAR_LF)
BsfInf.writelines ("COMP_TYPE" + \ BsfInf.writelines ("COMP_TYPE" + \
" = " + \ " = " + \
ComponentObj.CompType + \ ComponentObj.CompType + \
T_CHAR_LF ) T_CHAR_LF)
BsfInf.writelines ("COMP_VER" + \ BsfInf.writelines ("COMP_VER" + \
" = " + \ " = " + \
ComponentObj.CompVer + \ ComponentObj.CompVer + \
T_CHAR_LF ) T_CHAR_LF)
BsfInf.writelines ("COMP_CS" + \ BsfInf.writelines ("COMP_CS" + \
" = " + \ " = " + \
ComponentObj.CompCs + \ ComponentObj.CompCs + \
T_CHAR_LF ) T_CHAR_LF)
BinPath = ComponentObj.CompBin BinPath = ComponentObj.CompBin
if BinPath != '-': if BinPath != '-':
BinPath = GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(BinPath)) BinPath = GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(BinPath))
BsfInf.writelines ("COMP_BIN" + \ BsfInf.writelines ("COMP_BIN" + \
" = " + \ " = " + \
BinPath + \ BinPath + \
T_CHAR_LF ) T_CHAR_LF)
SymPath = ComponentObj.CompSym SymPath = ComponentObj.CompSym
if SymPath != '-': if SymPath != '-':
SymPath = GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(SymPath)) SymPath = GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(SymPath))
BsfInf.writelines ("COMP_SYM" + \ BsfInf.writelines ("COMP_SYM" + \
" = " + \ " = " + \
SymPath + \ SymPath + \
T_CHAR_LF ) T_CHAR_LF)
BsfInf.writelines ("COMP_SIZE" + \ BsfInf.writelines ("COMP_SIZE" + \
" = " + \ " = " + \
ComponentObj.CompSize + \ ComponentObj.CompSize + \
T_CHAR_LF ) T_CHAR_LF)
BsfInf.writelines (T_CHAR_LF ) BsfInf.writelines (T_CHAR_LF)
BsfInf.close() BsfInf.close()
## GenFvList() method ## GenFvList() method
@ -170,7 +170,7 @@ class Vtf (VtfClassObject):
(BaseAddress, Size) = FdAddressDict.get(i) (BaseAddress, Size) = FdAddressDict.get(i)
CmdStr += ( CmdStr += (
'-r', '0x%x' % BaseAddress, '-r', '0x%x' % BaseAddress,
'-s', '0x%x' %Size, '-s', '0x%x' % Size,
) )
return CmdStr return CmdStr


@ -112,11 +112,11 @@ def _parseGeneral(lines, efifilepath):
@param lines line array for map file @param lines line array for map file
@return a list which element hold (PcdName, Offset, SectionName) @return a list which element hold (PcdName, Offset, SectionName)
""" """
status = 0 #0 - beginning of file; 1 - PE section definition; 2 - symbol table status = 0 #0 - beginning of file; 1 - PE section definition; 2 - symbol table
secs = [] # key = section name secs = [] # key = section name
bPcds = [] bPcds = []
for line in lines: for line in lines:
line = line.strip() line = line.strip()
@ -128,9 +128,9 @@ def _parseGeneral(lines, efifilepath):
continue continue
if re.match("^entry point at", line): if re.match("^entry point at", line):
status = 3 status = 3
continue continue
if status == 1 and len(line) != 0: if status == 1 and len(line) != 0:
m = secRe.match(line) m = secRe.match(line)
assert m != None, "Fail to parse the section in map file , line is %s" % line assert m != None, "Fail to parse the section in map file , line is %s" % line
sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0) sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)
secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class]) secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])
@ -138,9 +138,9 @@ def _parseGeneral(lines, efifilepath):
m = symRe.match(line) m = symRe.match(line)
assert m != None, "Fail to parse the symbol in map file, line is %s" % line assert m != None, "Fail to parse the symbol in map file, line is %s" % line
sec_no, sym_offset, sym_name, vir_addr = m.groups(0) sec_no, sym_offset, sym_name, vir_addr = m.groups(0)
sec_no = int(sec_no, 16) sec_no = int(sec_no, 16)
sym_offset = int(sym_offset, 16) sym_offset = int(sym_offset, 16)
vir_addr = int(vir_addr, 16) vir_addr = int(vir_addr, 16)
m2 = re.match('^[_]+gPcd_BinaryPatch_([\w]+)', sym_name) m2 = re.match('^[_]+gPcd_BinaryPatch_([\w]+)', sym_name)
if m2 != None: if m2 != None:
# fond a binary pcd entry in map file # fond a binary pcd entry in map file
@ -179,7 +179,7 @@ def generatePcdTable(list, pcdpath):
f.close() f.close()
#print 'Success to generate Binary Patch PCD table at %s!' % pcdpath #print 'Success to generate Binary Patch PCD table at %s!' % pcdpath
if __name__ == '__main__': if __name__ == '__main__':
UsageString = "%prog -m <MapFile> -e <EfiFile> -o <OutFile>" UsageString = "%prog -m <MapFile> -e <EfiFile> -o <OutFile>"
AdditionalNotes = "\nPCD table is generated in file name with .BinaryPcdTable.txt postfix" AdditionalNotes = "\nPCD table is generated in file name with .BinaryPcdTable.txt postfix"
@ -196,12 +196,12 @@ if __name__ == '__main__':
if options.mapfile == None or options.efifile == None: if options.mapfile == None or options.efifile == None:
print parser.get_usage() print parser.get_usage()
elif os.path.exists(options.mapfile) and os.path.exists(options.efifile): elif os.path.exists(options.mapfile) and os.path.exists(options.efifile):
list = parsePcdInfoFromMapFile(options.mapfile, options.efifile) list = parsePcdInfoFromMapFile(options.mapfile, options.efifile)
if list != None: if list != None:
if options.outfile != None: if options.outfile != None:
generatePcdTable(list, options.outfile) generatePcdTable(list, options.outfile)
else: else:
generatePcdTable(list, options.mapfile.replace('.map', '.BinaryPcdTable.txt')) generatePcdTable(list, options.mapfile.replace('.map', '.BinaryPcdTable.txt'))
else: else:
print 'Fail to generate Patch PCD Table based on map file and efi file' print 'Fail to generate Patch PCD Table based on map file and efi file'
else: else:


@ -48,7 +48,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
# #
# Length of Binary File # Length of Binary File
# #
FileHandle = open (FileName, 'rb') FileHandle = open(FileName, 'rb')
FileHandle.seek (0, 2) FileHandle.seek (0, 2)
FileLength = FileHandle.tell() FileLength = FileHandle.tell()
FileHandle.close() FileHandle.close()
@ -75,7 +75,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
return OPTION_MISSING, "PcdMaxSize is not specified for VOID* type PCD." return OPTION_MISSING, "PcdMaxSize is not specified for VOID* type PCD."
ValueLength = int(MaxSize) ValueLength = int(MaxSize)
else: else:
return PARAMETER_INVALID, "PCD type %s is not valid." %(CommandOptions.PcdTypeName) return PARAMETER_INVALID, "PCD type %s is not valid." % (CommandOptions.PcdTypeName)
# #
# Check PcdValue is in the input binary file. # Check PcdValue is in the input binary file.
# #
@ -84,7 +84,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
# #
# Read binary file into array # Read binary file into array
# #
FileHandle = open (FileName, 'rb') FileHandle = open(FileName, 'rb')
ByteArray = array.array('B') ByteArray = array.array('B')
ByteArray.fromfile(FileHandle, FileLength) ByteArray.fromfile(FileHandle, FileLength)
FileHandle.close() FileHandle.close()
@ -117,7 +117,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
if ValueNumber != 0: if ValueNumber != 0:
ValueNumber = 1 ValueNumber = 1
except: except:
return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." %(ValueString) return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (ValueString)
# #
# Set PCD value into binary data # Set PCD value into binary data
# #
@ -132,7 +132,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
else: else:
ValueNumber = int (ValueString) ValueNumber = int (ValueString)
except: except:
return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." %(ValueString) return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (ValueString)
# #
# Set PCD value into binary data # Set PCD value into binary data
# #
@ -174,7 +174,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
if Index >= ValueLength: if Index >= ValueLength:
break break
except: except:
return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string array." %(ValueString) return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string array." % (ValueString)
else: else:
# #
# Patch ascii string # Patch ascii string
@ -197,10 +197,10 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
if ByteList != OrigByteList: if ByteList != OrigByteList:
ByteArray = array.array('B') ByteArray = array.array('B')
ByteArray.fromlist(ByteList) ByteArray.fromlist(ByteList)
FileHandle = open (FileName, 'wb') FileHandle = open(FileName, 'wb')
ByteArray.tofile(FileHandle) ByteArray.tofile(FileHandle)
FileHandle.close() FileHandle.close()
return 0, "Patch Value into File %s successfully." %(FileName) return 0, "Patch Value into File %s successfully." % (FileName)
## Parse command line options ## Parse command line options
# #
@ -270,7 +270,7 @@ def Main():
EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdOffset or PcdValue of PcdTypeName is not specified.") EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdOffset or PcdValue of PcdTypeName is not specified.")
return 1 return 1
if CommandOptions.PcdTypeName.upper() not in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64", "VOID*"]: if CommandOptions.PcdTypeName.upper() not in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64", "VOID*"]:
EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData="PCD type %s is not valid." %(CommandOptions.PcdTypeName)) EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData="PCD type %s is not valid." % (CommandOptions.PcdTypeName))
return 1 return 1
if CommandOptions.PcdTypeName.upper() == "VOID*" and CommandOptions.PcdMaxSize == None: if CommandOptions.PcdTypeName.upper() == "VOID*" and CommandOptions.PcdMaxSize == None:
EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdMaxSize is not specified for VOID* type PCD.") EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdMaxSize is not specified for VOID* type PCD.")


@ -68,14 +68,14 @@ class TableReport(Table):
# @param Enabled: If this error enabled # @param Enabled: If this error enabled
# @param Corrected: if this error corrected # @param Corrected: if this error corrected
# #
def Insert(self, ErrorID, OtherMsg = '', BelongsToTable = '', BelongsToItem = -1, Enabled = 0, Corrected = -1): def Insert(self, ErrorID, OtherMsg='', BelongsToTable='', BelongsToItem= -1, Enabled=0, Corrected= -1):
self.ID = self.ID + 1 self.ID = self.ID + 1
SqlCommand = """insert into %s values(%s, %s, '%s', '%s', %s, %s, %s)""" \ SqlCommand = """insert into %s values(%s, %s, '%s', '%s', %s, %s, %s)""" \
% (self.Table, self.ID, ErrorID, ConvertToSqlString2(OtherMsg), BelongsToTable, BelongsToItem, Enabled, Corrected) % (self.Table, self.ID, ErrorID, ConvertToSqlString2(OtherMsg), BelongsToTable, BelongsToItem, Enabled, Corrected)
Table.Insert(self, SqlCommand) Table.Insert(self, SqlCommand)
return self.ID return self.ID
## Query table ## Query table
# #
# @retval: A recordSet of all found records # @retval: A recordSet of all found records
@ -98,7 +98,7 @@ class TableReport(Table):
# #
# @param Filename: To filename to save the report content # @param Filename: To filename to save the report content
# #
def ToCSV(self, Filename = 'Report.csv'): def ToCSV(self, Filename='Report.csv'):
try: try:
File = open(Filename, 'w+') File = open(Filename, 'w+')
File.write("""No, Error Code, Error Message, File, LineNo, Other Error Message\n""") File.write("""No, Error Code, Error Message, File, LineNo, Other Error Message\n""")
@ -123,7 +123,7 @@ class TableReport(Table):
if NewRecord != []: if NewRecord != []:
File.write("""%s,%s,"%s",%s,%s,"%s"\n""" % (Index, ErrorID, EccToolError.gEccErrorMessage[ErrorID], NewRecord[0][1], NewRecord[0][0], OtherMsg)) File.write("""%s,%s,"%s",%s,%s,"%s"\n""" % (Index, ErrorID, EccToolError.gEccErrorMessage[ErrorID], NewRecord[0][1], NewRecord[0][0], OtherMsg))
EdkLogger.quiet("%s(%s): [%s]%s %s" % (NewRecord[0][1], NewRecord[0][0], ErrorID, EccToolError.gEccErrorMessage[ErrorID], OtherMsg)) EdkLogger.quiet("%s(%s): [%s]%s %s" % (NewRecord[0][1], NewRecord[0][0], ErrorID, EccToolError.gEccErrorMessage[ErrorID], OtherMsg))
File.close() File.close()
except IOError: except IOError:
NewFilename = 'Report_' + time.strftime("%Y%m%d_%H%M%S.csv", time.localtime()) NewFilename = 'Report_' + time.strftime("%Y%m%d_%H%M%S.csv", time.localtime())


@ -1525,7 +1525,7 @@ class DscParser(MetaFileParser):
ValList, Valid, Index = AnalyzeDscPcd(self._ValueList[2], self._ItemType) ValList, Valid, Index = AnalyzeDscPcd(self._ValueList[2], self._ItemType)
if not Valid: if not Valid:
EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self._FileWithError, Line=self._LineIndex+1, EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self._FileWithError, Line=self._LineIndex + 1,
ExtraData="%s.%s|%s" % (self._ValueList[0], self._ValueList[1], self._ValueList[2])) ExtraData="%s.%s|%s" % (self._ValueList[0], self._ValueList[1], self._ValueList[2]))
PcdValue = ValList[Index] PcdValue = ValList[Index]
if PcdValue: if PcdValue:


@ -1863,11 +1863,11 @@ class InfBuildData(ModuleBuildClassObject):
LineNo = Record[6] LineNo = Record[6]
break break
EdkLogger.error("build", FORMAT_NOT_SUPPORTED, EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
"MODULE_TYPE %s is not supported for EDK II, valid values are:\n %s" % (self._ModuleType,' '.join(l for l in SUP_MODULE_LIST)), "MODULE_TYPE %s is not supported for EDK II, valid values are:\n %s" % (self._ModuleType, ' '.join(l for l in SUP_MODULE_LIST)),
File=self.MetaFile, Line=LineNo) File=self.MetaFile, Line=LineNo)
if (self._Specification == None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x0001000A): if (self._Specification == None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x0001000A):
if self._ModuleType == SUP_MODULE_SMM_CORE: if self._ModuleType == SUP_MODULE_SMM_CORE:
EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.MetaFile) EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.MetaFile)
if self._Defs and 'PCI_DEVICE_ID' in self._Defs and 'PCI_VENDOR_ID' in self._Defs \ if self._Defs and 'PCI_DEVICE_ID' in self._Defs and 'PCI_VENDOR_ID' in self._Defs \
and 'PCI_CLASS_CODE' in self._Defs: and 'PCI_CLASS_CODE' in self._Defs:
self._BuildType = 'UEFI_OPTIONROM' self._BuildType = 'UEFI_OPTIONROM'
@ -1876,7 +1876,7 @@ class InfBuildData(ModuleBuildClassObject):
self._BuildType = 'UEFI_HII' self._BuildType = 'UEFI_HII'
else: else:
self._BuildType = self._ModuleType.upper() self._BuildType = self._ModuleType.upper()
if self._DxsFile: if self._DxsFile:
File = PathClass(NormPath(self._DxsFile), self._ModuleDir, Arch=self._Arch) File = PathClass(NormPath(self._DxsFile), self._ModuleDir, Arch=self._Arch)
# check the file validation # check the file validation
@ -1891,7 +1891,7 @@ class InfBuildData(ModuleBuildClassObject):
if not self._ComponentType: if not self._ComponentType:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
"COMPONENT_TYPE is not given", File=self.MetaFile) "COMPONENT_TYPE is not given", File=self.MetaFile)
self._BuildType = self._ComponentType.upper() self._BuildType = self._ComponentType.upper()
if self._ComponentType in self._MODULE_TYPE_: if self._ComponentType in self._MODULE_TYPE_:
self._ModuleType = self._MODULE_TYPE_[self._ComponentType] self._ModuleType = self._MODULE_TYPE_[self._ComponentType]
if self._ComponentType == 'LIBRARY': if self._ComponentType == 'LIBRARY':
@ -1901,7 +1901,7 @@ class InfBuildData(ModuleBuildClassObject):
Macros["EDK_SOURCE"] = GlobalData.gEcpSource Macros["EDK_SOURCE"] = GlobalData.gEcpSource
Macros['PROCESSOR'] = self._Arch Macros['PROCESSOR'] = self._Arch
RecordList = self._RawData[MODEL_META_DATA_NMAKE, self._Arch, self._Platform] RecordList = self._RawData[MODEL_META_DATA_NMAKE, self._Arch, self._Platform]
for Name,Value,Dummy,Arch,Platform,ID,LineNo in RecordList: for Name, Value, Dummy, Arch, Platform, ID, LineNo in RecordList:
Value = ReplaceMacro(Value, Macros, True) Value = ReplaceMacro(Value, Macros, True)
if Name == "IMAGE_ENTRY_POINT": if Name == "IMAGE_ENTRY_POINT":
if self._ModuleEntryPointList == None: if self._ModuleEntryPointList == None:
@ -2584,7 +2584,7 @@ class InfBuildData(ModuleBuildClassObject):
'build', 'build',
FORMAT_INVALID, FORMAT_INVALID,
"No TokenValue for PCD [%s.%s] in [%s]!" % (TokenSpaceGuid, PcdCName, str(Package)), "No TokenValue for PCD [%s.%s] in [%s]!" % (TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo, File=self.MetaFile, Line=LineNo,
ExtraData=None ExtraData=None
) )
# #
@ -2597,7 +2597,7 @@ class InfBuildData(ModuleBuildClassObject):
'build', 'build',
FORMAT_INVALID, FORMAT_INVALID,
"The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid:" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)), "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid:" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo, File=self.MetaFile, Line=LineNo,
ExtraData=None ExtraData=None
) )
@ -2611,19 +2611,19 @@ class InfBuildData(ModuleBuildClassObject):
EdkLogger.error( EdkLogger.error(
'build', 'build',
FORMAT_INVALID, FORMAT_INVALID,
"The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, as a decimal it should between: 0 - 4294967295!"% (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)), "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, as a decimal it should between: 0 - 4294967295!" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo, File=self.MetaFile, Line=LineNo,
ExtraData=None ExtraData=None
) )
except: except:
EdkLogger.error( EdkLogger.error(
'build', 'build',
FORMAT_INVALID, FORMAT_INVALID,
"The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, it should be hexadecimal or decimal!"% (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)), "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, it should be hexadecimal or decimal!" % (Pcd.TokenValue, TokenSpaceGuid, PcdCName, str(Package)),
File =self.MetaFile, Line=LineNo, File=self.MetaFile, Line=LineNo,
ExtraData=None ExtraData=None
) )
Pcd.DatumType = PcdInPackage.DatumType Pcd.DatumType = PcdInPackage.DatumType
Pcd.MaxDatumSize = PcdInPackage.MaxDatumSize Pcd.MaxDatumSize = PcdInPackage.MaxDatumSize
Pcd.InfDefaultValue = Pcd.DefaultValue Pcd.InfDefaultValue = Pcd.DefaultValue
@ -2635,7 +2635,7 @@ class InfBuildData(ModuleBuildClassObject):
'build', 'build',
FORMAT_INVALID, FORMAT_INVALID,
"PCD [%s.%s] in [%s] is not found in dependent packages:" % (TokenSpaceGuid, PcdCName, self.MetaFile), "PCD [%s.%s] in [%s] is not found in dependent packages:" % (TokenSpaceGuid, PcdCName, self.MetaFile),
File =self.MetaFile, Line=LineNo, File=self.MetaFile, Line=LineNo,
ExtraData="\t%s" % '\n\t'.join([str(P) for P in self.Packages]) ExtraData="\t%s" % '\n\t'.join([str(P) for P in self.Packages])
) )
Pcds[PcdCName, TokenSpaceGuid] = Pcd Pcds[PcdCName, TokenSpaceGuid] = Pcd
@ -2946,7 +2946,7 @@ determine whether database file is out of date!\n")
## Summarize all packages in the database ## Summarize all packages in the database
def GetPackageList(self, Platform, Arch, TargetName, ToolChainTag): def GetPackageList(self, Platform, Arch, TargetName, ToolChainTag):
self.Platform = Platform self.Platform = Platform
PackageList =[] PackageList = []
Pa = self.BuildObject[self.Platform, 'COMMON'] Pa = self.BuildObject[self.Platform, 'COMMON']
# #
# Get Package related to Modules # Get Package related to Modules
@ -2963,8 +2963,8 @@ determine whether database file is out of date!\n")
LibObj = self.BuildObject[Lib, Arch, TargetName, ToolChainTag] LibObj = self.BuildObject[Lib, Arch, TargetName, ToolChainTag]
for Package in LibObj.Packages: for Package in LibObj.Packages:
if Package not in PackageList: if Package not in PackageList:
PackageList.append(Package) PackageList.append(Package)
return PackageList return PackageList
## Summarize all platforms in the database ## Summarize all platforms in the database


@ -78,13 +78,13 @@ gLineMaxLength = 120
gEndOfLine = "\r\n" gEndOfLine = "\r\n"
## Tags for section start, end and separator ## Tags for section start, end and separator
gSectionStart = ">" + "=" * (gLineMaxLength-2) + "<" gSectionStart = ">" + "=" * (gLineMaxLength - 2) + "<"
gSectionEnd = "<" + "=" * (gLineMaxLength-2) + ">" + "\n" gSectionEnd = "<" + "=" * (gLineMaxLength - 2) + ">" + "\n"
gSectionSep = "=" * gLineMaxLength gSectionSep = "=" * gLineMaxLength
## Tags for subsection start, end and separator ## Tags for subsection start, end and separator
gSubSectionStart = ">" + "-" * (gLineMaxLength-2) + "<" gSubSectionStart = ">" + "-" * (gLineMaxLength - 2) + "<"
gSubSectionEnd = "<" + "-" * (gLineMaxLength-2) + ">" gSubSectionEnd = "<" + "-" * (gLineMaxLength - 2) + ">"
gSubSectionSep = "-" * gLineMaxLength gSubSectionSep = "-" * gLineMaxLength
@ -233,7 +233,7 @@ class DepexParser(object):
def __init__(self, Wa): def __init__(self, Wa):
self._GuidDb = {} self._GuidDb = {}
for Pa in Wa.AutoGenObjectList: for Pa in Wa.AutoGenObjectList:
for Package in Pa.PackageList: for Package in Pa.PackageList:
for Protocol in Package.Protocols: for Protocol in Package.Protocols:
GuidValue = GuidStructureStringToGuidString(Package.Protocols[Protocol]) GuidValue = GuidStructureStringToGuidString(Package.Protocols[Protocol])
self._GuidDb[GuidValue.upper()] = Protocol self._GuidDb[GuidValue.upper()] = Protocol
@ -265,8 +265,8 @@ class DepexParser(object):
GuidString = self._GuidDb.get(GuidValue, GuidValue) GuidString = self._GuidDb.get(GuidValue, GuidValue)
Statement = "%s %s" % (Statement, GuidString) Statement = "%s %s" % (Statement, GuidString)
DepexStatement.append(Statement) DepexStatement.append(Statement)
OpCode = DepexFile.read(1) OpCode = DepexFile.read(1)
return DepexStatement return DepexStatement
## ##
@ -361,7 +361,7 @@ class DepexReport(object):
# #
def __init__(self, M): def __init__(self, M):
self.Depex = "" self.Depex = ""
self._DepexFileName = os.path.join(M.BuildDir, "OUTPUT", M.Module.BaseName + ".depex") self._DepexFileName = os.path.join(M.BuildDir, "OUTPUT", M.Module.BaseName + ".depex")
ModuleType = M.ModuleType ModuleType = M.ModuleType
if not ModuleType: if not ModuleType:
ModuleType = gComponentType2ModuleType.get(M.ComponentType, "") ModuleType = gComponentType2ModuleType.get(M.ComponentType, "")
@ -536,7 +536,7 @@ class ModuleReport(object):
# If a module complies to PI 1.1, promote Module type to "SMM_DRIVER" # If a module complies to PI 1.1, promote Module type to "SMM_DRIVER"
# #
if ModuleType == "DXE_SMM_DRIVER": if ModuleType == "DXE_SMM_DRIVER":
PiSpec = M.Module.Specification.get("PI_SPECIFICATION_VERSION", "0x00010000") PiSpec = M.Module.Specification.get("PI_SPECIFICATION_VERSION", "0x00010000")
if int(PiSpec, 0) >= 0x0001000A: if int(PiSpec, 0) >= 0x0001000A:
ModuleType = "SMM_DRIVER" ModuleType = "SMM_DRIVER"
self.DriverType = gDriverTypeMap.get(ModuleType, "0x2 (FREE_FORM)") self.DriverType = gDriverTypeMap.get(ModuleType, "0x2 (FREE_FORM)")
@@ -813,20 +813,20 @@ class PcdReport(object):
# Report PCD item according to their override relationship
#
if DecMatch and InfMatch:
- FileWrite(File, ' %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '('+Pcd.DatumType+')', PcdValue.strip()))
+ FileWrite(File, ' %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '(' + Pcd.DatumType + ')', PcdValue.strip()))
else:
if DscMatch:
if (Pcd.TokenCName, Key) in self.FdfPcdSet:
- FileWrite(File, ' *F %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '('+Pcd.DatumType+')', PcdValue.strip()))
+ FileWrite(File, ' *F %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '(' + Pcd.DatumType + ')', PcdValue.strip()))
else:
- FileWrite(File, ' *P %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '('+Pcd.DatumType+')', PcdValue.strip()))
+ FileWrite(File, ' *P %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '(' + Pcd.DatumType + ')', PcdValue.strip()))
else:
- FileWrite(File, ' *M %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '('+Pcd.DatumType+')', PcdValue.strip()))
+ FileWrite(File, ' *M %-*s: %6s %10s = %-22s' % (self.MaxLen, Pcd.TokenCName, TypeName, '(' + Pcd.DatumType + ')', PcdValue.strip()))
if TypeName in ('DYNHII', 'DEXHII', 'DYNVPD', 'DEXVPD'):
for SkuInfo in Pcd.SkuInfoList.values():
if TypeName in ('DYNHII', 'DEXHII'):
FileWrite(File, '%*s: %s: %s' % (self.MaxLen + 4, SkuInfo.VariableGuid, SkuInfo.VariableName, SkuInfo.VariableOffset))
else:
FileWrite(File, '%*s' % (self.MaxLen + 4, SkuInfo.VpdOffset))
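The PcdReport hunk above makes the same operator-spacing fix inside the report format arguments: '('+Pcd.DatumType+')' becomes '(' + Pcd.DatumType + ')'. A rough, self-contained sketch of that line format with placeholder values (the real code writes through FileWrite into the build report):

    # Placeholder values standing in for PcdReport attributes; illustration only.
    MaxLen = 28
    TokenCName = "PcdExampleToken"
    TypeName = "FIXED"
    DatumType = "UINT32"
    PcdValue = "0x00000001 "

    # Same format string as the report lines above, with the spaced-out concatenation.
    print(' %-*s: %6s %10s = %-22s' % (MaxLen, TokenCName, TypeName, '(' + DatumType + ')', PcdValue.strip()))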
@@ -210,7 +210,7 @@ def NormFile(FilePath, Workspace):
# check if the file path exists or not
if not os.path.isfile(FileFullPath):
EdkLogger.error("build", FILE_NOT_FOUND, ExtraData="\t%s (Please give file in absolute path or relative to WORKSPACE)" % FileFullPath)
# remove workspace directory from the beginning part of the file path
if Workspace[-1] in ["\\", "/"]:
@@ -1122,13 +1122,13 @@ class Build():
# Update Image to new BaseAddress by GenFw tool
#
LaunchCommand(["GenFw", "--rebase", str(BaseAddress), "-r", ModuleOutputImage], ModuleInfo.OutputDir)
LaunchCommand(["GenFw", "--rebase", str(BaseAddress), "-r", ModuleDebugImage], ModuleInfo.DebugDir)
else:
#
# Set new address to the section header only for SMM driver.
#
LaunchCommand(["GenFw", "--address", str(BaseAddress), "-r", ModuleOutputImage], ModuleInfo.OutputDir)
LaunchCommand(["GenFw", "--address", str(BaseAddress), "-r", ModuleDebugImage], ModuleInfo.DebugDir)
#
# Collect funtion address from Map file
#
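In the load-fix-address path above, GenFw is run with --rebase for ordinary drivers and --address for SMM drivers, against both the output and debug copies of the image. A hedged sketch of roughly what those LaunchCommand calls amount to, using subprocess directly; the helper name and parameters here are illustrative and not part of BaseTools:

    import subprocess

    def rebase_image(base_address, image_path, work_dir, smm_driver=False):
        # SMM drivers only get the address written into the section header
        # (--address); other drivers are fully rebased (--rebase), matching
        # the two branches in the hunk above.
        flag = "--address" if smm_driver else "--rebase"
        subprocess.check_call(["GenFw", flag, str(base_address), "-r", image_path], cwd=work_dir)

    # Example (requires GenFw on PATH):
    # rebase_image(0xFFF00000, "Module.efi", "Build/OUTPUT", smm_driver=False)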
@@ -1136,7 +1136,7 @@ class Build():
FunctionList = []
if os.path.exists(ImageMapTable):
OrigImageBaseAddress = 0
- ImageMap = open (ImageMapTable, 'r')
+ ImageMap = open(ImageMapTable, 'r')
for LinStr in ImageMap:
if len (LinStr.strip()) == 0:
continue
@@ -1149,7 +1149,7 @@ class Build():
StrList = LinStr.split()
if len (StrList) > 4:
- if StrList[3] == 'f' or StrList[3] =='F':
+ if StrList[3] == 'f' or StrList[3] == 'F':
Name = StrList[1]
RelativeAddress = int (StrList[2], 16) - OrigImageBaseAddress
FunctionList.append ((Name, RelativeAddress))
@@ -1273,7 +1273,7 @@ class Build():
if not ImageClass.IsValid:
EdkLogger.error("build", FILE_PARSE_FAILURE, ExtraData=ImageClass.ErrorInfo)
ImageInfo = PeImageInfo(Module.Name, Module.Guid, Module.Arch, Module.OutputDir, Module.DebugDir, ImageClass)
- if Module.ModuleType in ['PEI_CORE', 'PEIM', 'COMBINED_PEIM_DRIVER','PIC_PEIM', 'RELOCATABLE_PEIM', 'DXE_CORE']:
+ if Module.ModuleType in ['PEI_CORE', 'PEIM', 'COMBINED_PEIM_DRIVER', 'PIC_PEIM', 'RELOCATABLE_PEIM', 'DXE_CORE']:
PeiModuleList[Module.MetaFile] = ImageInfo
PeiSize += ImageInfo.Image.Size
elif Module.ModuleType in ['BS_DRIVER', 'DXE_DRIVER', 'UEFI_DRIVER']:
@@ -1354,21 +1354,21 @@ class Build():
for PcdInfo in PcdTable:
ReturnValue = 0
if PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE:
- ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE, str (PeiSize/0x1000))
+ ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE, str (PeiSize / 0x1000))
elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE:
- ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE, str (BtSize/0x1000))
+ ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE, str (BtSize / 0x1000))
elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE:
- ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE, str (RtSize/0x1000))
+ ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE, str (RtSize / 0x1000))
elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE and len (SmmModuleList) > 0:
- ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE, str (SmmSize/0x1000))
+ ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE, str (SmmSize / 0x1000))
if ReturnValue != 0:
EdkLogger.error("build", PARAMETER_INVALID, "Patch PCD value failed", ExtraData=ErrorInfo)
- MapBuffer.write('PEI_CODE_PAGE_NUMBER = 0x%x\n' % (PeiSize/0x1000))
- MapBuffer.write('BOOT_CODE_PAGE_NUMBER = 0x%x\n' % (BtSize/0x1000))
- MapBuffer.write('RUNTIME_CODE_PAGE_NUMBER = 0x%x\n' % (RtSize/0x1000))
+ MapBuffer.write('PEI_CODE_PAGE_NUMBER = 0x%x\n' % (PeiSize / 0x1000))
+ MapBuffer.write('BOOT_CODE_PAGE_NUMBER = 0x%x\n' % (BtSize / 0x1000))
+ MapBuffer.write('RUNTIME_CODE_PAGE_NUMBER = 0x%x\n' % (RtSize / 0x1000))
if len (SmmModuleList) > 0:
- MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize/0x1000))
+ MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize / 0x1000))
PeiBaseAddr = TopMemoryAddress - RtSize - BtSize
BtBaseAddr = TopMemoryAddress - RtSize
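The page-number PCDs patched above are simply the region sizes divided by 0x1000, i.e. expressed in 4 KiB pages (BaseTools runs on Python 2, where / between integers already truncates). A small worked example with an assumed size:

    # Hypothetical PEI region size in bytes; illustration only.
    PeiSize = 0x24000

    # 0x24000 bytes / 0x1000 bytes-per-page = 0x24 (36) pages.
    # '//' keeps the arithmetic integral on Python 3 as well.
    print('PEI_CODE_PAGE_NUMBER = 0x%x' % (PeiSize // 0x1000))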
@@ -1377,7 +1377,7 @@ class Build():
self._RebaseModule (MapBuffer, PeiBaseAddr, PeiModuleList, TopMemoryAddress == 0)
self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0)
self._RebaseModule (MapBuffer, RtBaseAddr, RtModuleList, TopMemoryAddress == 0)
- self._RebaseModule (MapBuffer, 0x1000, SmmModuleList, AddrIsOffset = False, ModeIsSmm = True)
+ self._RebaseModule (MapBuffer, 0x1000, SmmModuleList, AddrIsOffset=False, ModeIsSmm=True)
MapBuffer.write('\n\n')
sys.stdout.write ("\n")
sys.stdout.flush()
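The last _RebaseModule call above also picks up the keyword-argument convention: no spaces around '=' when passing AddrIsOffset and ModeIsSmm, while '=' in an ordinary assignment keeps its spaces. A tiny sketch with a hypothetical helper name:

    # Hypothetical stand-in for the _RebaseModule call pattern; illustration only.
    def rebase_module(map_buffer, base_addr, module_list, AddrIsOffset=True, ModeIsSmm=False):
        return (base_addr, len(module_list), AddrIsOffset, ModeIsSmm)

    # Keyword arguments: no spaces around '='. Assignment: spaces around '='.
    Result = rebase_module(None, 0x1000, [], AddrIsOffset=False, ModeIsSmm=True)
    print(Result)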
@@ -1395,7 +1395,7 @@ class Build():
SaveFileOnChange(MapFilePath, MapBuffer.getvalue(), False)
MapBuffer.close()
if self.LoadFixAddress != 0:
- sys.stdout.write ("\nLoad Module At Fix Address Map file can be found at %s\n" %(MapFilePath))
+ sys.stdout.write ("\nLoad Module At Fix Address Map file can be found at %s\n" % (MapFilePath))
sys.stdout.flush()
## Build active platform for different build targets and different tool chains
@@ -1529,7 +1529,7 @@ class Build():
BUILD_ERROR,
"Module for [%s] is not a component of active platform."\
" Please make sure that the ARCH and inf file path are"\
- " given in the same as in [%s]" %\
+ " given in the same as in [%s]" % \
(', '.join(Wa.ArchList), self.PlatformFile),
ExtraData=self.ModuleFile
)
@@ -1874,8 +1874,8 @@ def SingleCheckCallback(option, opt_str, value, parser):
# @retval Args Target of build command
#
def MyOptionParser():
- Parser = OptionParser(description=__copyright__,version=__version__,prog="build.exe",usage="%prog [options] [all|fds|genc|genmake|clean|cleanall|cleanlib|modules|libraries|run]")
- Parser.add_option("-a", "--arch", action="append", type="choice", choices=['IA32','X64','IPF','EBC','ARM', 'AARCH64'], dest="TargetArch",
+ Parser = OptionParser(description=__copyright__, version=__version__, prog="build.exe", usage="%prog [options] [all|fds|genc|genmake|clean|cleanall|cleanlib|modules|libraries|run]")
+ Parser.add_option("-a", "--arch", action="append", type="choice", choices=['IA32', 'X64', 'IPF', 'EBC', 'ARM', 'AARCH64'], dest="TargetArch",
help="ARCHS is one of list: IA32, X64, IPF, ARM, AARCH64 or EBC, which overrides target.txt's TARGET_ARCH definition. To specify more archs, please repeat this option.")
Parser.add_option("-p", "--platform", action="callback", type="string", dest="PlatformFile", callback=SingleCheckCallback,
help="Build the platform specified by the DSC file name argument, overriding target.txt's ACTIVE_PLATFORM definition.")
@@ -1917,7 +1917,7 @@ def MyOptionParser():
Parser.add_option("-D", "--define", action="append", type="string", dest="Macros", help="Macro: \"Name [= Value]\".")
Parser.add_option("-y", "--report-file", action="store", dest="ReportFile", help="Create/overwrite the report to the specified filename.")
- Parser.add_option("-Y", "--report-type", action="append", type="choice", choices=['PCD','LIBRARY','FLASH','DEPEX','BUILD_FLAGS','FIXED_ADDRESS', 'EXECUTION_ORDER'], dest="ReportType", default=[],
+ Parser.add_option("-Y", "--report-type", action="append", type="choice", choices=['PCD', 'LIBRARY', 'FLASH', 'DEPEX', 'BUILD_FLAGS', 'FIXED_ADDRESS', 'EXECUTION_ORDER'], dest="ReportType", default=[],
help="Flags that control the type of build report to generate. Must be one of: [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS, EXECUTION_ORDER]. "\
"To specify more than one flag, repeat this option on the command line and the default flag set is [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS]")
Parser.add_option("-F", "--flag", action="store", type="string", dest="Flag",
@@ -1929,7 +1929,7 @@ def MyOptionParser():
Parser.add_option("--check-usage", action="store_true", dest="CheckUsage", default=False, help="Check usage content of entries listed in INF file.")
Parser.add_option("--ignore-sources", action="store_true", dest="IgnoreSources", default=False, help="Focus to a binary build and ignore all source files")
- (Opt, Args)=Parser.parse_args()
+ (Opt, Args) = Parser.parse_args()
return (Opt, Args)
## Tool entrance method
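The MyOptionParser hunks above add a space after every comma in the choices lists and around '=' in the (Opt, Args) assignment. A cut-down optparse sketch showing the same spacing (only a couple of the real options are reproduced; the rest are omitted):

    from optparse import OptionParser

    Parser = OptionParser(prog="build.exe", usage="%prog [options] [all|fds|genc|genmake|clean|cleanall|cleanlib|modules|libraries|run]")
    Parser.add_option("-a", "--arch", action="append", type="choice",
                      choices=['IA32', 'X64', 'IPF', 'EBC', 'ARM', 'AARCH64'], dest="TargetArch",
                      help="Target architecture; repeat the option for more than one.")

    # Spaces around '=' for the assignment, none inside the keyword arguments above.
    (Opt, Args) = Parser.parse_args([])
    print(Opt.TargetArch, Args)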
@@ -1985,13 +1985,13 @@ def Main():
Target = "all"
elif len(Target) >= 2:
EdkLogger.error("build", OPTION_NOT_SUPPORTED, "More than one targets are not supported.",
- ExtraData="Please select one of: %s" %(' '.join(gSupportedTarget)))
+ ExtraData="Please select one of: %s" % (' '.join(gSupportedTarget)))
else:
Target = Target[0].lower()
if Target not in gSupportedTarget:
EdkLogger.error("build", OPTION_NOT_SUPPORTED, "Not supported target [%s]." % Target,
- ExtraData="Please select one of: %s" %(' '.join(gSupportedTarget)))
+ ExtraData="Please select one of: %s" % (' '.join(gSupportedTarget)))
#
# Check environment variable: EDK_TOOLS_PATH, WORKSPACE, PATH
@@ -2069,7 +2069,7 @@ def Main():
if Option != None and Option.debug != None:
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
else:
- EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError = False)
+ EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
ReturnCode = FORMAT_INVALID
except KeyboardInterrupt:
ReturnCode = ABORT_ERROR
@@ -2110,7 +2110,7 @@ def Main():
BuildDuration = time.gmtime(int(round(FinishTime - StartTime)))
BuildDurationStr = ""
if BuildDuration.tm_yday > 1:
- BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration) + ", %d day(s)"%(BuildDuration.tm_yday - 1)
+ BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration) + ", %d day(s)" % (BuildDuration.tm_yday - 1)
else:
BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration)
if MyBuild != None:
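The final hunk spaces out the '%' string-formatting operator in the build-duration message. A self-contained rerun of that logic with made-up timestamps:

    import time

    # Made-up timestamps: the "build" takes 1 day, 1 hour, 1 minute, 1 second.
    StartTime = 0
    FinishTime = 90061

    BuildDuration = time.gmtime(int(round(FinishTime - StartTime)))
    if BuildDuration.tm_yday > 1:
        BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration) + ", %d day(s)" % (BuildDuration.tm_yday - 1)
    else:
        BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration)
    print(BuildDurationStr)   # -> 01:01:01, 1 day(s)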