BaseTools: Adjust the spaces around commas and colons

Based on "futurize -f lib2to3.fixes.fix_ws_comma"

Contributed-under: TianoCore Contribution Agreement 1.1
Cc: Yonghong Zhu <yonghong.zhu@intel.com>
Cc: Liming Gao <liming.gao@intel.com>
Signed-off-by: Gary Lin <glin@suse.com>
Reviewed-by: Yonghong Zhu <yonghong.zhu@intel.com>
Author:       Gary Lin
Date:         2018-06-25 18:31:33 +08:00
Committed by: Yonghong Zhu
Parent:       df29fd130a
Commit:       ccaa7754a2

57 changed files with 543 additions and 543 deletions
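
The ws_comma fixer quoted in the commit message only normalizes the whitespace that follows commas (and dictionary colons); it does not otherwise reformat the code, which is why the diff below is purely mechanical. A minimal sketch of running the same fixer programmatically (illustrative only: it assumes an interpreter that still ships lib2to3, and the two sample source lines simply mirror lines touched by this patch):

    from lib2to3.refactor import RefactoringTool

    # Load only the whitespace-after-comma/colon fixer named in the commit message.
    # ws_comma is an "explicit" fixer, so it must be requested by name.
    FIXER = 'lib2to3.fixes.fix_ws_comma'
    tool = RefactoringTool([FIXER], explicit=[FIXER])

    src = (
        'VpdFile.Add(Pcd, SkuName,Sku.VpdOffset)\n'
        'TokenTypeDict = {"PCD_TYPE_SHIFT":28,"PCD_TYPE_DATA":(0x0 << 28)}\n'
    )

    # Prints the source with ", " and ": " normalized, i.e.
    #   VpdFile.Add(Pcd, SkuName, Sku.VpdOffset)
    #   TokenTypeDict = {"PCD_TYPE_SHIFT": 28, "PCD_TYPE_DATA": (0x0 << 28)}
    print(tool.refactor_string(src, '<ws_comma demo>'))

The whole tree can be converted the same way by running the quoted futurize command over the BaseTools Python sources (futurize's -w option rewrites the files in place).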


@@ -48,7 +48,7 @@ def ConvertCygPathToDos(CygPath):
 DosPath = CygPath
 # pipes.quote will add the extra \\ for us.
-return DosPath.replace('/','\\')
+return DosPath.replace('/', '\\')
 # we receive our options as a list, but we will be passing them to the shell as a line


@@ -41,13 +41,13 @@ if __name__ == '__main__':
 return Value
 def ValidatePcdName (Argument):
-if re.split ('[a-zA-Z\_][a-zA-Z0-9\_]*\.[a-zA-Z\_][a-zA-Z0-9\_]*', Argument) != ['','']:
+if re.split ('[a-zA-Z\_][a-zA-Z0-9\_]*\.[a-zA-Z\_][a-zA-Z0-9\_]*', Argument) != ['', '']:
 Message = '{Argument} is not in the form <PcdTokenSpaceGuidCName>.<PcdCName>'.format (Argument = Argument)
 raise argparse.ArgumentTypeError (Message)
 return Argument
 def ValidateGuidName (Argument):
-if re.split ('[a-zA-Z\_][a-zA-Z0-9\_]*', Argument) != ['','']:
+if re.split ('[a-zA-Z\_][a-zA-Z0-9\_]*', Argument) != ['', '']:
 Message = '{Argument} is not a valid GUID C name'.format (Argument = Argument)
 raise argparse.ArgumentTypeError (Message)
 return Argument
@@ -84,7 +84,7 @@ if __name__ == '__main__':
 help = "Output filename for PCD value or PCD statement")
 parser.add_argument ("-p", "--pcd", dest = 'PcdName', type = ValidatePcdName,
 help = "Name of the PCD in the form <PcdTokenSpaceGuidCName>.<PcdCName>")
-parser.add_argument ("-t", "--type", dest = 'PcdType', default = None, choices = ['VPD','HII'],
+parser.add_argument ("-t", "--type", dest = 'PcdType', default = None, choices = ['VPD', 'HII'],
 help = "PCD statement type (HII or VPD). Default is standard.")
 parser.add_argument ("-m", "--max-size", dest = 'MaxSize', type = ValidateUnsignedInteger,
 help = "Maximum size of the PCD. Ignored with --type HII.")


@@ -62,7 +62,7 @@ def FormatFilesInDir(DirPath, ExtList, Args):
 FormatFile(File, Args)
 if __name__ == "__main__":
-parser = argparse.ArgumentParser(prog=__prog__,description=__description__ + __copyright__, conflict_handler = 'resolve')
+parser = argparse.ArgumentParser(prog=__prog__, description=__description__ + __copyright__, conflict_handler = 'resolve')
 parser.add_argument('Path', nargs='+',
 help='the path for files to be converted.It could be directory or file path.')


@@ -191,7 +191,7 @@ def processLine(newline):
 driverPrefixLen = len("Driver - ")
 # get driver name
-if cmp(newline[0:driverPrefixLen],"Driver - ") == 0 :
+if cmp(newline[0:driverPrefixLen], "Driver - ") == 0 :
 driverlineList = newline.split(" ")
 driverName = driverlineList[2]
 #print "Checking : ", driverName
@@ -214,7 +214,7 @@ def processLine(newline):
 else :
 symbolsFile.symbolsTable[driverName].parse_debug_file (driverName, pdbName)
-elif cmp(newline,"") == 0 :
+elif cmp(newline, "") == 0 :
 driverName = ""
 # check entry line
@@ -227,7 +227,7 @@ def processLine(newline):
 rvaName = ""
 symbolName = ""
-if cmp(rvaName,"") == 0 :
+if cmp(rvaName, "") == 0 :
 return newline
 else :
 return newline + symbolName


@@ -66,7 +66,7 @@ class Page(BaseDoxygeItem):
 def AddSection(self, section):
 self.mSections.append(section)
-self.mSections.sort(cmp=lambda x,y: cmp(x.mName.lower(), y.mName.lower()))
+self.mSections.sort(cmp=lambda x, y: cmp(x.mName.lower(), y.mName.lower()))
 def Generate(self):
 if self.mIsMainPage:
@@ -91,7 +91,7 @@ class Page(BaseDoxygeItem):
 self.mText.insert(endIndex, '<ul>')
 endIndex += 1
 if self.mIsSort:
-self.mSubPages.sort(cmp=lambda x,y: cmp(x.mName.lower(), y.mName.lower()))
+self.mSubPages.sort(cmp=lambda x, y: cmp(x.mName.lower(), y.mName.lower()))
 for page in self.mSubPages:
 self.mText.insert(endIndex, '<li>\subpage %s \"%s\" </li>' % (page.mTag, page.mName))
 endIndex += 1


@@ -285,7 +285,7 @@ class GitDiffCheck:
 if self.state == START:
 if line.startswith('diff --git'):
 self.state = PRE_PATCH
-self.filename = line[13:].split(' ',1)[0]
+self.filename = line[13:].split(' ', 1)[0]
 self.is_newfile = False
 self.force_crlf = not self.filename.endswith('.sh')
 elif len(line.rstrip()) != 0:


@@ -149,7 +149,7 @@ if __name__ == '__main__':
 for Item in gArgs.Define:
 if '=' not in Item[0]:
 continue
-Item = Item[0].split('=',1)
+Item = Item[0].split('=', 1)
 CommandLine.append('%s="%s"' % (Item[0], Item[1]))
 CommandLine.append('EXTRA_FLAGS="%s"' % (gArgs.Remaining))
 CommandLine.append(gArgs.BuildType)


@@ -48,7 +48,7 @@ from Common.MultipleWorkspace import MultipleWorkspace as mws
 import InfSectionParser
 import datetime
 import hashlib
-from GenVar import VariableMgr,var_info
+from GenVar import VariableMgr, var_info
 from collections import OrderedDict
 from collections import defaultdict
 from Workspace.WorkspaceCommon import OrderedListDict
@@ -1293,7 +1293,7 @@ class PlatformAutoGen(AutoGen):
 ShareFixedAtBuildPcdsSameValue = {}
 for Module in LibAuto._ReferenceModules:
 for Pcd in Module.FixedAtBuildPcds + LibAuto.FixedAtBuildPcds:
-key = ".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName))
+key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
 if key not in FixedAtBuildPcds:
 ShareFixedAtBuildPcdsSameValue[key] = True
 FixedAtBuildPcds[key] = Pcd.DefaultValue
@@ -1301,11 +1301,11 @@ class PlatformAutoGen(AutoGen):
 if FixedAtBuildPcds[key] != Pcd.DefaultValue:
 ShareFixedAtBuildPcdsSameValue[key] = False
 for Pcd in LibAuto.FixedAtBuildPcds:
-key = ".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName))
+key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
-if (Pcd.TokenCName,Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
+if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
 continue
 else:
-DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName,Pcd.TokenSpaceGuidCName)]
+DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)]
 if DscPcd.Type != TAB_PCDS_FIXED_AT_BUILD:
 continue
 if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
@@ -1325,12 +1325,12 @@ class PlatformAutoGen(AutoGen):
 break
-VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(),self.DscBuildDataObj._GetSkuIds())
+VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(), self.DscBuildDataObj._GetSkuIds())
 VariableInfo.SetVpdRegionMaxSize(VpdRegionSize)
 VariableInfo.SetVpdRegionOffset(VpdRegionBase)
 Index = 0
 for Pcd in DynamicPcdSet:
-pcdname = ".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName))
+pcdname = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
 for SkuName in Pcd.SkuInfoList:
 Sku = Pcd.SkuInfoList[SkuName]
 SkuId = Sku.SkuId
@@ -1340,11 +1340,11 @@ class PlatformAutoGen(AutoGen):
 VariableGuidStructure = Sku.VariableGuidValue
 VariableGuid = GuidStructureStringToGuidString(VariableGuidStructure)
 for StorageName in Sku.DefaultStoreDict:
-VariableInfo.append_variable(var_info(Index,pcdname,StorageName,SkuName, StringToArray(Sku.VariableName),VariableGuid, Sku.VariableOffset, Sku.VariableAttribute , Sku.HiiDefaultValue,Sku.DefaultStoreDict[StorageName],Pcd.DatumType))
+VariableInfo.append_variable(var_info(Index, pcdname, StorageName, SkuName, StringToArray(Sku.VariableName), VariableGuid, Sku.VariableOffset, Sku.VariableAttribute, Sku.HiiDefaultValue, Sku.DefaultStoreDict[StorageName], Pcd.DatumType))
 Index += 1
 return VariableInfo
-def UpdateNVStoreMaxSize(self,OrgVpdFile):
+def UpdateNVStoreMaxSize(self, OrgVpdFile):
 if self.VariableInfo:
 VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)
 PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]
@@ -1357,7 +1357,7 @@ class PlatformAutoGen(AutoGen):
 else:
 EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
-NvStoreOffset = int(NvStoreOffset,16) if NvStoreOffset.upper().startswith("0X") else int(NvStoreOffset)
+NvStoreOffset = int(NvStoreOffset, 16) if NvStoreOffset.upper().startswith("0X") else int(NvStoreOffset)
 default_skuobj = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)
 maxsize = self.VariableInfo.VpdRegionSize - NvStoreOffset if self.VariableInfo.VpdRegionSize else len(default_skuobj.DefaultValue.split(","))
 var_data = self.VariableInfo.PatchNVStoreDefaultMaxSize(maxsize)
@@ -1569,7 +1569,7 @@ class PlatformAutoGen(AutoGen):
 VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd
 #Collect DynamicHii PCD values and assign it to DynamicExVpd PCD gEfiMdeModulePkgTokenSpaceGuid.PcdNvStoreDefaultValueBuffer
-PcdNvStoreDfBuffer = VpdPcdDict.get(("PcdNvStoreDefaultValueBuffer","gEfiMdeModulePkgTokenSpaceGuid"))
+PcdNvStoreDfBuffer = VpdPcdDict.get(("PcdNvStoreDefaultValueBuffer", "gEfiMdeModulePkgTokenSpaceGuid"))
 if PcdNvStoreDfBuffer:
 self.VariableInfo = self.CollectVariables(self._DynamicPcdList)
 vardump = self.VariableInfo.dump()
@@ -1595,10 +1595,10 @@ class PlatformAutoGen(AutoGen):
 PcdValue = DefaultSku.DefaultValue
 if PcdValue not in SkuValueMap:
 SkuValueMap[PcdValue] = []
-VpdFile.Add(Pcd, TAB_DEFAULT,DefaultSku.VpdOffset)
+VpdFile.Add(Pcd, TAB_DEFAULT, DefaultSku.VpdOffset)
 SkuValueMap[PcdValue].append(DefaultSku)
-for (SkuName,Sku) in Pcd.SkuInfoList.items():
+for (SkuName, Sku) in Pcd.SkuInfoList.items():
 Sku.VpdOffset = Sku.VpdOffset.strip()
 PcdValue = Sku.DefaultValue
 if PcdValue == "":
@@ -1624,7 +1624,7 @@ class PlatformAutoGen(AutoGen):
 EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Alignment))
 if PcdValue not in SkuValueMap:
 SkuValueMap[PcdValue] = []
-VpdFile.Add(Pcd, SkuName,Sku.VpdOffset)
+VpdFile.Add(Pcd, SkuName, Sku.VpdOffset)
 SkuValueMap[PcdValue].append(Sku)
 # if the offset of a VPD is *, then it need to be fixed up by third party tool.
 if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
@@ -1656,9 +1656,9 @@ class PlatformAutoGen(AutoGen):
 SkuObjList = DscPcdEntry.SkuInfoList.items()
 DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)
 if DefaultSku:
-defaultindex = SkuObjList.index((TAB_DEFAULT,DefaultSku))
+defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))
-SkuObjList[0],SkuObjList[defaultindex] = SkuObjList[defaultindex],SkuObjList[0]
+SkuObjList[0], SkuObjList[defaultindex] = SkuObjList[defaultindex], SkuObjList[0]
-for (SkuName,Sku) in SkuObjList:
+for (SkuName, Sku) in SkuObjList:
 Sku.VpdOffset = Sku.VpdOffset.strip()
 # Need to iterate DEC pcd information to get the value & datumtype
@@ -1708,7 +1708,7 @@ class PlatformAutoGen(AutoGen):
 EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, Alignment))
 if PcdValue not in SkuValueMap:
 SkuValueMap[PcdValue] = []
-VpdFile.Add(DscPcdEntry, SkuName,Sku.VpdOffset)
+VpdFile.Add(DscPcdEntry, SkuName, Sku.VpdOffset)
 SkuValueMap[PcdValue].append(Sku)
 if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
 NeedProcessVpdMapFile = True
@@ -1774,17 +1774,17 @@ class PlatformAutoGen(AutoGen):
 self._DynamicPcdList.extend(list(UnicodePcdArray))
 self._DynamicPcdList.extend(list(HiiPcdArray))
 self._DynamicPcdList.extend(list(OtherPcdArray))
-allskuset = [(SkuName,Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName,Sku) in pcd.SkuInfoList.items()]
+allskuset = [(SkuName, Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName, Sku) in pcd.SkuInfoList.items()]
 for pcd in self._DynamicPcdList:
 if len(pcd.SkuInfoList) == 1:
-for (SkuName,SkuId) in allskuset:
+for (SkuName, SkuId) in allskuset:
-if type(SkuId) in (str,unicode) and eval(SkuId) == 0 or SkuId == 0:
+if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0:
 continue
 pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])
 pcd.SkuInfoList[SkuName].SkuId = SkuId
 self.AllPcdList = self._NonDynamicPcdList + self._DynamicPcdList
-def FixVpdOffset(self,VpdFile ):
+def FixVpdOffset(self, VpdFile ):
 FvPath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY)
 if not os.path.exists(FvPath):
 try:
@@ -2050,7 +2050,7 @@ class PlatformAutoGen(AutoGen):
 if self._NonDynamicPcdDict:
 return self._NonDynamicPcdDict
 for Pcd in self.NonDynamicPcdList:
-self._NonDynamicPcdDict[(Pcd.TokenCName,Pcd.TokenSpaceGuidCName)] = Pcd
+self._NonDynamicPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd
 return self._NonDynamicPcdDict
 ## Get list of non-dynamic PCDs
@@ -3711,7 +3711,7 @@ class ModuleAutoGen(AutoGen):
 try:
 fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
 except:
-EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName,None)
+EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
 # Use a instance of StringIO to cache data
 fStringIO = StringIO('')
@@ -3746,7 +3746,7 @@ class ModuleAutoGen(AutoGen):
 fInputfile.write (fStringIO.getvalue())
 except:
 EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
-"file been locked or using by other applications." %UniVfrOffsetFileName,None)
+"file been locked or using by other applications." %UniVfrOffsetFileName, None)
 fStringIO.close ()
 fInputfile.close ()
@@ -4181,7 +4181,7 @@ class ModuleAutoGen(AutoGen):
 def CopyBinaryFiles(self):
 for File in self.Module.Binaries:
 SrcPath = File.Path
-DstPath = os.path.join(self.OutputDir , os.path.basename(SrcPath))
+DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))
 CopyLongFilePath(SrcPath, DstPath)
 ## Create autogen code for the module and its dependent libraries
 #
@@ -4331,7 +4331,7 @@ class ModuleAutoGen(AutoGen):
 if SrcTimeStamp > DstTimeStamp:
 return False
-with open(self.GetTimeStampPath(),'r') as f:
+with open(self.GetTimeStampPath(), 'r') as f:
 for source in f:
 source = source.rstrip('\n')
 if not os.path.exists(source):


@@ -949,7 +949,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
 AutoGenH.Append('// Disabled the macros, as PcdToken and PcdGet/Set are not allowed in the case that more than one DynamicEx Pcds are different Guids but same CName.\n')
 AutoGenH.Append('// #define %s %s\n' % (PcdTokenName, PcdExTokenName))
 AutoGenH.Append('// #define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
-AutoGenH.Append('// #define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName,Pcd.TokenSpaceGuidCName, PcdTokenName))
+AutoGenH.Append('// #define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
 if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
 AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
 AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
@@ -959,7 +959,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
 else:
 AutoGenH.Append('#define %s %s\n' % (PcdTokenName, PcdExTokenName))
 AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
-AutoGenH.Append('#define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName,Pcd.TokenSpaceGuidCName, PcdTokenName))
+AutoGenH.Append('#define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
 if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
 AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
 AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
@@ -1073,7 +1073,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
 Value = eval(Value) # translate escape character
 ValueSize = len(Value) + 1
 NewValue = '{'
-for Index in range(0,len(Value)):
+for Index in range(0, len(Value)):
 if Unicode:
 NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ', '
 else:
@@ -1119,14 +1119,14 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
 PcdDataSize = Pcd.GetPcdSize()
 if Pcd.Type == TAB_PCDS_FIXED_AT_BUILD:
 AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
-AutoGenH.Append('#define %s %s \n' % (GetModeSizeName,FixPcdSizeTokenName))
+AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, FixPcdSizeTokenName))
-AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED const UINTN %s = %s;\n' % (FixedPcdSizeVariableName,PcdDataSize))
+AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED const UINTN %s = %s;\n' % (FixedPcdSizeVariableName, PcdDataSize))
 if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
 AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, Pcd.MaxDatumSize))
-AutoGenH.Append('#define %s %s \n' % (GetModeSizeName,PatchPcdSizeVariableName))
+AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, PatchPcdSizeVariableName))
 AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
-AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName,PcdDataSize))
+AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName, PcdDataSize))
-AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED const UINTN %s = %s;\n' % (PatchPcdMaxSizeVariable,Pcd.MaxDatumSize))
+AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED const UINTN %s = %s;\n' % (PatchPcdMaxSizeVariable, Pcd.MaxDatumSize))
 elif Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
 AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
 AutoGenC.Append('volatile %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
@@ -1136,13 +1136,13 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
 PcdDataSize = Pcd.GetPcdSize()
 AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PcdDataSize))
-AutoGenH.Append('#define %s %s \n' % (GetModeSizeName,PatchPcdSizeVariableName))
+AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, PatchPcdSizeVariableName))
 AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
-AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName,PcdDataSize))
+AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName, PcdDataSize))
 else:
 PcdDataSize = Pcd.GetPcdSize()
 AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
-AutoGenH.Append('#define %s %s \n' % (GetModeSizeName,FixPcdSizeTokenName))
+AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, FixPcdSizeTokenName))
 AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
 AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
@@ -1249,7 +1249,7 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
 AutoGenH.Append('// Disabled the macros, as PcdToken and PcdGet/Set are not allowed in the case that more than one DynamicEx Pcds are different Guids but same CName.\n')
 AutoGenH.Append('// #define %s %s\n' % (PcdTokenName, PcdExTokenName))
 AutoGenH.Append('// #define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
-AutoGenH.Append('// #define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName,Pcd.TokenSpaceGuidCName, PcdTokenName))
+AutoGenH.Append('// #define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
 if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
 AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
 AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
@@ -1259,7 +1259,7 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
 else:
 AutoGenH.Append('#define %s %s\n' % (PcdTokenName, PcdExTokenName))
 AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
-AutoGenH.Append('#define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName,Pcd.TokenSpaceGuidCName, PcdTokenName))
+AutoGenH.Append('#define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
 if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
 AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
 AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
@@ -1310,11 +1310,11 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
 AutoGenH.Append('#define %s(Value) ((%s = (Value)), RETURN_SUCCESS)\n' % (SetModeStatusName, PcdVariableName))
 AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PcdDataSize))
-AutoGenH.Append('#define %s %s\n' % (GetModeSizeName,PatchPcdSizeVariableName))
+AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, PatchPcdSizeVariableName))
 AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
 if PcdItemType == TAB_PCDS_FIXED_AT_BUILD or PcdItemType == TAB_PCDS_FEATURE_FLAG:
-key = ".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName))
+key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
 PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + TokenCName
 if DatumType == TAB_VOID and Array == '[]':
 DatumType = [TAB_UINT8, TAB_UINT16][Pcd.DefaultValue[0] == 'L']
@@ -1338,14 +1338,14 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
 if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
 if ConstFixedPcd:
 AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
-AutoGenH.Append('#define %s %s\n' % (GetModeSizeName,FixPcdSizeTokenName))
+AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, FixPcdSizeTokenName))
 else:
-AutoGenH.Append('#define %s %s\n' % (GetModeSizeName,FixedPcdSizeVariableName))
+AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, FixedPcdSizeVariableName))
-AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName,FixedPcdSizeVariableName))
+AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, FixedPcdSizeVariableName))
 AutoGenH.Append('extern const UINTN %s; \n' % FixedPcdSizeVariableName)
 else:
 AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
-AutoGenH.Append('#define %s %s\n' % (GetModeSizeName,FixPcdSizeTokenName))
+AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, FixPcdSizeTokenName))
 ## Create code for library constructor
 #
@@ -1373,11 +1373,11 @@ def CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH):
 elif Lib.ModuleType in SUP_MODULE_SET_PEI:
 ConstructorPrototypeString.Append(gLibraryStructorPrototype['PEI'].Replace(Dict))
 ConstructorCallingString.Append(gLibraryStructorCall['PEI'].Replace(Dict))
-elif Lib.ModuleType in [SUP_MODULE_DXE_CORE,SUP_MODULE_DXE_DRIVER,SUP_MODULE_DXE_SMM_DRIVER,SUP_MODULE_DXE_RUNTIME_DRIVER,
+elif Lib.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
-SUP_MODULE_DXE_SAL_DRIVER,SUP_MODULE_UEFI_DRIVER,SUP_MODULE_UEFI_APPLICATION,SUP_MODULE_SMM_CORE]:
+SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
 ConstructorPrototypeString.Append(gLibraryStructorPrototype['DXE'].Replace(Dict))
 ConstructorCallingString.Append(gLibraryStructorCall['DXE'].Replace(Dict))
-elif Lib.ModuleType in [SUP_MODULE_MM_STANDALONE,SUP_MODULE_MM_CORE_STANDALONE]:
+elif Lib.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
 ConstructorPrototypeString.Append(gLibraryStructorPrototype['MM'].Replace(Dict))
 ConstructorCallingString.Append(gLibraryStructorCall['MM'].Replace(Dict))
@@ -1402,10 +1402,10 @@ def CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH):
 AutoGenC.Append(gLibraryString[SUP_MODULE_BASE].Replace(Dict))
 elif Info.ModuleType in SUP_MODULE_SET_PEI:
 AutoGenC.Append(gLibraryString['PEI'].Replace(Dict))
-elif Info.ModuleType in [SUP_MODULE_DXE_CORE,SUP_MODULE_DXE_DRIVER,SUP_MODULE_DXE_SMM_DRIVER,SUP_MODULE_DXE_RUNTIME_DRIVER,
+elif Info.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
-SUP_MODULE_DXE_SAL_DRIVER,SUP_MODULE_UEFI_DRIVER,SUP_MODULE_UEFI_APPLICATION,SUP_MODULE_SMM_CORE]:
+SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
 AutoGenC.Append(gLibraryString['DXE'].Replace(Dict))
-elif Info.ModuleType in [SUP_MODULE_MM_STANDALONE,SUP_MODULE_MM_CORE_STANDALONE]:
+elif Info.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
 AutoGenC.Append(gLibraryString['MM'].Replace(Dict))
 ## Create code for library destructor
@@ -1435,11 +1435,11 @@ def CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH):
 elif Lib.ModuleType in SUP_MODULE_SET_PEI:
 DestructorPrototypeString.Append(gLibraryStructorPrototype['PEI'].Replace(Dict))
 DestructorCallingString.Append(gLibraryStructorCall['PEI'].Replace(Dict))
-elif Lib.ModuleType in [SUP_MODULE_DXE_CORE,SUP_MODULE_DXE_DRIVER,SUP_MODULE_DXE_SMM_DRIVER,SUP_MODULE_DXE_RUNTIME_DRIVER,
+elif Lib.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
-SUP_MODULE_DXE_SAL_DRIVER,SUP_MODULE_UEFI_DRIVER,SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
+SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
 DestructorPrototypeString.Append(gLibraryStructorPrototype['DXE'].Replace(Dict))
 DestructorCallingString.Append(gLibraryStructorCall['DXE'].Replace(Dict))
-elif Lib.ModuleType in [SUP_MODULE_MM_STANDALONE,SUP_MODULE_MM_CORE_STANDALONE]:
+elif Lib.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
 DestructorPrototypeString.Append(gLibraryStructorPrototype['MM'].Replace(Dict))
 DestructorCallingString.Append(gLibraryStructorCall['MM'].Replace(Dict))
@@ -1464,10 +1464,10 @@ def CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH):
 AutoGenC.Append(gLibraryString[SUP_MODULE_BASE].Replace(Dict))
 elif Info.ModuleType in SUP_MODULE_SET_PEI:
 AutoGenC.Append(gLibraryString['PEI'].Replace(Dict))
-elif Info.ModuleType in [SUP_MODULE_DXE_CORE,SUP_MODULE_DXE_DRIVER,SUP_MODULE_DXE_SMM_DRIVER,SUP_MODULE_DXE_RUNTIME_DRIVER,
+elif Info.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
-SUP_MODULE_DXE_SAL_DRIVER,SUP_MODULE_UEFI_DRIVER,SUP_MODULE_UEFI_APPLICATION,SUP_MODULE_SMM_CORE]:
+SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
 AutoGenC.Append(gLibraryString['DXE'].Replace(Dict))
-elif Info.ModuleType in [SUP_MODULE_MM_STANDALONE,SUP_MODULE_MM_CORE_STANDALONE]:
+elif Info.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
 AutoGenC.Append(gLibraryString['MM'].Replace(Dict))
@@ -1526,7 +1526,7 @@ def CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH):
 else:
 AutoGenC.Append(gPeimEntryPointString[2].Replace(Dict))
 AutoGenH.Append(gPeimEntryPointPrototype.Replace(Dict))
-elif Info.ModuleType in [SUP_MODULE_DXE_RUNTIME_DRIVER,SUP_MODULE_DXE_DRIVER,SUP_MODULE_DXE_SAL_DRIVER,SUP_MODULE_UEFI_DRIVER]:
+elif Info.ModuleType in [SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
 if NumEntryPoints < 2:
 AutoGenC.Append(gUefiDriverEntryPointString[NumEntryPoints].Replace(Dict))
 else:
@@ -1925,7 +1925,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
 ImageType, = struct.unpack('2s', Buffer[0:2])
 if ImageType!= 'BM': # BMP file type is 'BM'
 EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)
-BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize','bfReserved1','bfReserved2','bfOffBits','biSize','biWidth','biHeight','biPlanes','biBitCount', 'biCompression', 'biSizeImage','biXPelsPerMeter','biYPelsPerMeter','biClrUsed','biClrImportant'])
+BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])
 BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')
 BmpHeader = BMP_IMAGE_HEADER._make(BMP_IMAGE_HEADER_STRUCT.unpack_from(Buffer[2:]))
 #
@@ -2009,7 +2009,7 @@ def CreateHeaderCode(Info, AutoGenC, AutoGenH):
 # file header
 AutoGenH.Append(gAutoGenHeaderString.Replace({'FileName':'AutoGen.h'}))
 # header file Prologue
-AutoGenH.Append(gAutoGenHPrologueString.Replace({'File':'AUTOGENH','Guid':Info.Guid.replace('-','_')}))
+AutoGenH.Append(gAutoGenHPrologueString.Replace({'File':'AUTOGENH','Guid':Info.Guid.replace('-', '_')}))
 AutoGenH.Append(gAutoGenHCppPrologueString)
 if Info.AutoGenVersion >= 0x00010005:
 # header files includes
@@ -2085,7 +2085,7 @@ def CreateCode(Info, AutoGenC, AutoGenH, StringH, UniGenCFlag, UniGenBinBuffer,
 if Info.UnicodeFileList:
 FileName = "%sStrDefs.h" % Info.Name
 StringH.Append(gAutoGenHeaderString.Replace({'FileName':FileName}))
-StringH.Append(gAutoGenHPrologueString.Replace({'File':'STRDEFS', 'Guid':Info.Guid.replace('-','_')}))
+StringH.Append(gAutoGenHPrologueString.Replace({'File':'STRDEFS', 'Guid':Info.Guid.replace('-', '_')}))
 CreateUnicodeStringCode(Info, AutoGenC, StringH, UniGenCFlag, UniGenBinBuffer)
 GuidMacros = []
@@ -2131,7 +2131,7 @@ def CreateCode(Info, AutoGenC, AutoGenH, StringH, UniGenCFlag, UniGenBinBuffer,
 if Info.IdfFileList:
 FileName = "%sImgDefs.h" % Info.Name
 StringIdf.Append(gAutoGenHeaderString.Replace({'FileName':FileName}))
-StringIdf.Append(gAutoGenHPrologueString.Replace({'File':'IMAGEDEFS', 'Guid':Info.Guid.replace('-','_')}))
+StringIdf.Append(gAutoGenHPrologueString.Replace({'File':'IMAGEDEFS', 'Guid':Info.Guid.replace('-', '_')}))
 CreateIdfFileCode(Info, AutoGenC, StringIdf, IdfGenCFlag, IdfGenBinBuffer)
 StringIdf.Append("\n#endif\n")


@@ -745,7 +745,7 @@ cleanlib:
 if CmdName == 'Trim':
 SecDepsFileList.append(os.path.join('$(DEBUG_DIR)', os.path.basename(OutputFile).replace('offset', 'efi')))
 if OutputFile.endswith('.ui') or OutputFile.endswith('.ver'):
-SecDepsFileList.append(os.path.join('$(MODULE_DIR)','$(MODULE_FILE)'))
+SecDepsFileList.append(os.path.join('$(MODULE_DIR)', '$(MODULE_FILE)'))
 self.FfsOutputFileList.append((OutputFile, ' '.join(SecDepsFileList), SecCmdStr))
 if len(SecDepsFileList) > 0:
 self.ParseSecCmd(SecDepsFileList, CmdTuple)
@@ -867,7 +867,7 @@ cleanlib:
 for Target in BuildTargets:
 for i, SingleCommand in enumerate(BuildTargets[Target].Commands):
 if FlagDict[Flag]['Macro'] in SingleCommand:
-BuildTargets[Target].Commands[i] = SingleCommand.replace('$(INC)','').replace(FlagDict[Flag]['Macro'], RespMacro)
+BuildTargets[Target].Commands[i] = SingleCommand.replace('$(INC)', '').replace(FlagDict[Flag]['Macro'], RespMacro)
 return RespDict
 def ProcessBuildTargetList(self):


@@ -574,22 +574,22 @@ def StringArrayToList(StringArray):
 #
 def GetTokenTypeValue(TokenType):
 TokenTypeDict = {
-"PCD_TYPE_SHIFT":28,
+"PCD_TYPE_SHIFT": 28,
-"PCD_TYPE_DATA":(0x0 << 28),
+"PCD_TYPE_DATA": (0x0 << 28),
-"PCD_TYPE_HII":(0x8 << 28),
+"PCD_TYPE_HII": (0x8 << 28),
-"PCD_TYPE_VPD":(0x4 << 28),
+"PCD_TYPE_VPD": (0x4 << 28),
 # "PCD_TYPE_SKU_ENABLED":(0x2 << 28),
-"PCD_TYPE_STRING":(0x1 << 28),
+"PCD_TYPE_STRING": (0x1 << 28),
-"PCD_DATUM_TYPE_SHIFT":24,
+"PCD_DATUM_TYPE_SHIFT": 24,
-"PCD_DATUM_TYPE_POINTER":(0x0 << 24),
+"PCD_DATUM_TYPE_POINTER": (0x0 << 24),
-"PCD_DATUM_TYPE_UINT8":(0x1 << 24),
+"PCD_DATUM_TYPE_UINT8": (0x1 << 24),
-"PCD_DATUM_TYPE_UINT16":(0x2 << 24),
+"PCD_DATUM_TYPE_UINT16": (0x2 << 24),
-"PCD_DATUM_TYPE_UINT32":(0x4 << 24),
+"PCD_DATUM_TYPE_UINT32": (0x4 << 24),
-"PCD_DATUM_TYPE_UINT64":(0x8 << 24),
+"PCD_DATUM_TYPE_UINT64": (0x8 << 24),
-"PCD_DATUM_TYPE_SHIFT2":20,
+"PCD_DATUM_TYPE_SHIFT2": 20,
-"PCD_DATUM_TYPE_UINT8_BOOLEAN":(0x1 << 20 | 0x1 << 24),
+"PCD_DATUM_TYPE_UINT8_BOOLEAN": (0x1 << 20 | 0x1 << 24),
 }
 return eval(TokenType, TokenTypeDict)
@@ -643,7 +643,7 @@ def BuildExDataBase(Dict):
 DbPcdCNameTable = DbStringItemList(0, RawDataList = PcdCNameTableValue, LenList = PcdCNameLen)
 PcdNameOffsetTable = Dict['PCD_NAME_OFFSET']
-DbPcdNameOffsetTable = DbItemList(4,RawDataList = PcdNameOffsetTable)
+DbPcdNameOffsetTable = DbItemList(4, RawDataList = PcdNameOffsetTable)
 SizeTableValue = zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH'])
 DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
@@ -678,16 +678,16 @@ def BuildExDataBase(Dict):
 PcdTokenNumberMap = Dict['PCD_ORDER_TOKEN_NUMBER_MAP']
 DbNameTotle = ["SkuidValue", "InitValueUint64", "VardefValueUint64", "InitValueUint32", "VardefValueUint32", "VpdHeadValue", "ExMapTable",
-"LocalTokenNumberTable", "GuidTable", "StringHeadValue", "PcdNameOffsetTable","VariableTable", "StringTableLen", "PcdTokenTable", "PcdCNameTable",
+"LocalTokenNumberTable", "GuidTable", "StringHeadValue", "PcdNameOffsetTable", "VariableTable", "StringTableLen", "PcdTokenTable", "PcdCNameTable",
 "SizeTableValue", "InitValueUint16", "VardefValueUint16", "InitValueUint8", "VardefValueUint8", "InitValueBoolean",
 "VardefValueBoolean", "UnInitValueUint64", "UnInitValueUint32", "UnInitValueUint16", "UnInitValueUint8", "UnInitValueBoolean"]
 DbTotal = [SkuidValue, InitValueUint64, VardefValueUint64, InitValueUint32, VardefValueUint32, VpdHeadValue, ExMapTable,
-LocalTokenNumberTable, GuidTable, StringHeadValue, PcdNameOffsetTable,VariableTable, StringTableLen, PcdTokenTable,PcdCNameTable,
+LocalTokenNumberTable, GuidTable, StringHeadValue, PcdNameOffsetTable, VariableTable, StringTableLen, PcdTokenTable, PcdCNameTable,
 SizeTableValue, InitValueUint16, VardefValueUint16, InitValueUint8, VardefValueUint8, InitValueBoolean,
 VardefValueBoolean, UnInitValueUint64, UnInitValueUint32, UnInitValueUint16, UnInitValueUint8, UnInitValueBoolean]
 DbItemTotal = [DbSkuidValue, DbInitValueUint64, DbVardefValueUint64, DbInitValueUint32, DbVardefValueUint32, DbVpdHeadValue, DbExMapTable,
-DbLocalTokenNumberTable, DbGuidTable, DbStringHeadValue, DbPcdNameOffsetTable,DbVariableTable, DbStringTableLen, DbPcdTokenTable, DbPcdCNameTable,
+DbLocalTokenNumberTable, DbGuidTable, DbStringHeadValue, DbPcdNameOffsetTable, DbVariableTable, DbStringTableLen, DbPcdTokenTable, DbPcdCNameTable,
 DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16, DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
 DbVardefValueBoolean, DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean]
@@ -746,7 +746,7 @@ def BuildExDataBase(Dict):
 DbOffset += (8 - DbOffset % 8)
 else:
 assert(False)
-if isinstance(VariableRefTable[0],list):
+if isinstance(VariableRefTable[0], list):
 DbOffset += skuindex * 4
 skuindex += 1
 if DbIndex >= InitTableNum:
@@ -893,54 +893,54 @@ def CreatePcdDatabaseCode (Info, AutoGenC, AutoGenH):
 Changed = SaveFileOnChange(DbFileName, DbFile.getvalue(), True)
 def CreatePcdDataBase(PcdDBData):
 delta = {}
-for skuname,skuid in PcdDBData:
+for skuname, skuid in PcdDBData:
-if len(PcdDBData[(skuname,skuid)][1]) != len(PcdDBData[(TAB_DEFAULT,"0")][1]):
+if len(PcdDBData[(skuname, skuid)][1]) != len(PcdDBData[(TAB_DEFAULT, "0")][1]):
 EdkLogger.ERROR("The size of each sku in one pcd are not same")
-for skuname,skuid in PcdDBData:
+for skuname, skuid in PcdDBData:
 if skuname == TAB_DEFAULT:
 continue
-delta[(skuname,skuid)] = [(index,data,hex(data)) for index,data in enumerate(PcdDBData[(skuname,skuid)][1]) if PcdDBData[(skuname,skuid)][1][index] != PcdDBData[(TAB_DEFAULT,"0")][1][index]]
+delta[(skuname, skuid)] = [(index, data, hex(data)) for index, data in enumerate(PcdDBData[(skuname, skuid)][1]) if PcdDBData[(skuname, skuid)][1][index] != PcdDBData[(TAB_DEFAULT, "0")][1][index]]
-databasebuff = PcdDBData[(TAB_DEFAULT,"0")][0]
+databasebuff = PcdDBData[(TAB_DEFAULT, "0")][0]
-for skuname,skuid in delta:
+for skuname, skuid in delta:
 # 8 byte align
 if len(databasebuff) % 8 > 0:
 for i in range(8 - (len(databasebuff) % 8)):
-databasebuff += pack("=B",0)
+databasebuff += pack("=B", 0)
 databasebuff += pack('=Q', int(skuid))
 databasebuff += pack('=Q', 0)
-databasebuff += pack('=L', 8+8+4+4*len(delta[(skuname,skuid)]))
+databasebuff += pack('=L', 8+8+4+4*len(delta[(skuname, skuid)]))
-for item in delta[(skuname,skuid)]:
+for item in delta[(skuname, skuid)]:
-databasebuff += pack("=L",item[0])
+databasebuff += pack("=L", item[0])
-databasebuff = databasebuff[:-1] + pack("=B",item[1])
+databasebuff = databasebuff[:-1] + pack("=B", item[1])
 totallen = len(databasebuff)
-totallenbuff = pack("=L",totallen)
+totallenbuff = pack("=L", totallen)
 newbuffer = databasebuff[:32]
 for i in range(4):
 newbuffer += totallenbuff[i]
-for i in range(36,totallen):
+for i in range(36, totallen):
 newbuffer += databasebuff[i]
 return newbuffer
 def CreateVarCheckBin(VarCheckTab):
-return VarCheckTab[(TAB_DEFAULT,"0")]
+return VarCheckTab[(TAB_DEFAULT, "0")]
 def CreateAutoGen(PcdDriverAutoGenData):
 autogenC = TemplateString()
-for skuname,skuid in PcdDriverAutoGenData:
+for skuname, skuid in PcdDriverAutoGenData:
 autogenC.Append("//SKUID: %s" % skuname)
-autogenC.Append(PcdDriverAutoGenData[(skuname,skuid)][1].String)
+autogenC.Append(PcdDriverAutoGenData[(skuname, skuid)][1].String)
-return (PcdDriverAutoGenData[(skuname,skuid)][0],autogenC)
+return (PcdDriverAutoGenData[(skuname, skuid)][0], autogenC)
-def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform,Phase):
+def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
-def prune_sku(pcd,skuname):
+def prune_sku(pcd, skuname):
 new_pcd = copy.deepcopy(pcd)
 new_pcd.SkuInfoList = {skuname:pcd.SkuInfoList[skuname]}
 new_pcd.isinit = 'INIT'
 if new_pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
 for skuobj in pcd.SkuInfoList.values():
 if skuobj.DefaultValue:
-defaultvalue = int(skuobj.DefaultValue,16) if skuobj.DefaultValue.upper().startswith("0X") else int(skuobj.DefaultValue,10)
+defaultvalue = int(skuobj.DefaultValue, 16) if skuobj.DefaultValue.upper().startswith("0X") else int(skuobj.DefaultValue, 10)
 if defaultvalue != 0:
 new_pcd.isinit = "INIT"
 break
@@ -951,32 +951,32 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform,Phase):
 new_pcd.isinit = "UNINIT"
 return new_pcd
 DynamicPcds = Platform.DynamicPcdList
-DynamicPcdSet_Sku = {(SkuName,skuobj.SkuId):[] for pcd in DynamicPcds for (SkuName,skuobj) in pcd.SkuInfoList.items() }
+DynamicPcdSet_Sku = {(SkuName, skuobj.SkuId):[] for pcd in DynamicPcds for (SkuName, skuobj) in pcd.SkuInfoList.items() }
-for skuname,skuid in DynamicPcdSet_Sku:
+for skuname, skuid in DynamicPcdSet_Sku:
-DynamicPcdSet_Sku[(skuname,skuid)] = [prune_sku(pcd,skuname) for pcd in DynamicPcds]
+DynamicPcdSet_Sku[(skuname, skuid)] = [prune_sku(pcd, skuname) for pcd in DynamicPcds]
 PcdDBData = {}
 PcdDriverAutoGenData = {}
 VarCheckTableData = {}
 if DynamicPcdSet_Sku:
-for skuname,skuid in DynamicPcdSet_Sku:
+for skuname, skuid in DynamicPcdSet_Sku:
-AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer,VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform,DynamicPcdSet_Sku[(skuname,skuid)], Phase)
+AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)
 final_data = ()
 for item in PcdDbBuffer:
-final_data += unpack("B",item)
+final_data += unpack("B", item)
-PcdDBData[(skuname,skuid)] = (PcdDbBuffer, final_data)
+PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)
-PcdDriverAutoGenData[(skuname,skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)
+PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)
-VarCheckTableData[(skuname,skuid)] = VarCheckTab
+VarCheckTableData[(skuname, skuid)] = VarCheckTab
 if Platform.Platform.VarCheckFlag:
 dest = os.path.join(Platform.BuildDir, TAB_FV_DIRECTORY)
 VarCheckTable = CreateVarCheckBin(VarCheckTableData)
 VarCheckTable.dump(dest, Phase)
 AdditionalAutoGenH, AdditionalAutoGenC = CreateAutoGen(PcdDriverAutoGenData)
 else:
-AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer,VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform,{}, Phase)
+AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)
 final_data = ()
 for item in PcdDbBuffer:
-final_data += unpack("B",item)
+final_data += unpack("B", item)
-PcdDBData[(TAB_DEFAULT,"0")] = (PcdDbBuffer, final_data)
+PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)
 return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)
 ## Create PCD database in DXE or PEI phase
@@ -1022,14 +1022,14 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
 Dict['VARDEF_SKUID_' + DatumType] = []
 Dict['VARDEF_VALUE_' + DatumType] = []
 Dict['VARDEF_DB_VALUE_' + DatumType] = []
-for Init in ['INIT','UNINIT']:
+for Init in ['INIT', 'UNINIT']:
 Dict[Init+'_CNAME_DECL_' + DatumType] = []
 Dict[Init+'_GUID_DECL_' + DatumType] = []
 Dict[Init+'_NUMSKUS_DECL_' + DatumType] = []
 Dict[Init+'_VALUE_' + DatumType] = []
 Dict[Init+'_DB_VALUE_'+DatumType] = []
-for Type in ['STRING_HEAD','VPD_HEAD','VARIABLE_HEAD']:
+for Type in ['STRING_HEAD', 'VPD_HEAD', 'VARIABLE_HEAD']:
 Dict[Type + '_CNAME_DECL'] = []
 Dict[Type + '_GUID_DECL'] = []
 Dict[Type + '_NUMSKUS_DECL'] = []
@@ -1190,7 +1190,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
 Dict['STRING_TABLE_INDEX'].append('')
else: else:
Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex) Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
VarNameSize = len(VariableNameStructure.replace(',',' ').split()) VarNameSize = len(VariableNameStructure.replace(',', ' ').split())
Dict['STRING_TABLE_LENGTH'].append(VarNameSize ) Dict['STRING_TABLE_LENGTH'].append(VarNameSize )
Dict['STRING_TABLE_VALUE'].append(VariableNameStructure) Dict['STRING_TABLE_VALUE'].append(VariableNameStructure)
StringHeadOffsetList.append(str(StringTableSize) + 'U') StringHeadOffsetList.append(str(StringTableSize) + 'U')
@ -1198,7 +1198,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
VarStringDbOffsetList.append(StringTableSize) VarStringDbOffsetList.append(StringTableSize)
Dict['STRING_DB_VALUE'].append(VarStringDbOffsetList) Dict['STRING_DB_VALUE'].append(VarStringDbOffsetList)
StringTableIndex += 1 StringTableIndex += 1
StringTableSize += len(VariableNameStructure.replace(',',' ').split()) StringTableSize += len(VariableNameStructure.replace(',', ' ').split())
VariableHeadStringIndex = 0 VariableHeadStringIndex = 0
for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)): for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)):
VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index] VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index]
@ -1237,7 +1237,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
elif Pcd.DatumType in (TAB_UINT32, TAB_UINT16, TAB_UINT8): elif Pcd.DatumType in (TAB_UINT32, TAB_UINT16, TAB_UINT8):
Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "U") Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "U")
elif Pcd.DatumType == "BOOLEAN": elif Pcd.DatumType == "BOOLEAN":
if eval(Sku.HiiDefaultValue) in [1,0]: if eval(Sku.HiiDefaultValue) in [1, 0]:
Dict['VARDEF_VALUE_'+Pcd.DatumType].append(str(eval(Sku.HiiDefaultValue)) + "U") Dict['VARDEF_VALUE_'+Pcd.DatumType].append(str(eval(Sku.HiiDefaultValue)) + "U")
else: else:
Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue) Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
@ -1287,7 +1287,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex) Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
if Sku.DefaultValue[0] == 'L': if Sku.DefaultValue[0] == 'L':
DefaultValueBinStructure = StringToArray(Sku.DefaultValue) DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
Size = len(DefaultValueBinStructure.replace(',',' ').split()) Size = len(DefaultValueBinStructure.replace(',', ' ').split())
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure) Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
elif Sku.DefaultValue[0] == '"': elif Sku.DefaultValue[0] == '"':
DefaultValueBinStructure = StringToArray(Sku.DefaultValue) DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
@ -1599,7 +1599,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
# print Phase # print Phase
Buffer = BuildExDataBase(Dict) Buffer = BuildExDataBase(Dict)
return AutoGenH, AutoGenC, Buffer,VarCheckTab return AutoGenH, AutoGenC, Buffer, VarCheckTab
def GetOrderedDynamicPcdList(DynamicPcdList, PcdTokenNumberList): def GetOrderedDynamicPcdList(DynamicPcdList, PcdTokenNumberList):
ReorderedDyPcdList = [None for i in range(len(DynamicPcdList))] ReorderedDyPcdList = [None for i in range(len(DynamicPcdList))]


@ -14,7 +14,7 @@
# # # #
# Import Modules # Import Modules
# #
from struct import pack,unpack from struct import pack, unpack
import collections import collections
import copy import copy
from Common.VariableAttributes import VariableAttributes from Common.VariableAttributes import VariableAttributes
@ -27,7 +27,7 @@ NvStorageHeaderSize = 28
VariableHeaderSize = 32 VariableHeaderSize = 32
class VariableMgr(object): class VariableMgr(object):
def __init__(self, DefaultStoreMap,SkuIdMap): def __init__(self, DefaultStoreMap, SkuIdMap):
self.VarInfo = [] self.VarInfo = []
self.DefaultStoreMap = DefaultStoreMap self.DefaultStoreMap = DefaultStoreMap
self.SkuIdMap = SkuIdMap self.SkuIdMap = SkuIdMap
@ -37,19 +37,19 @@ class VariableMgr(object):
self.VarDefaultBuff = None self.VarDefaultBuff = None
self.VarDeltaBuff = None self.VarDeltaBuff = None
def append_variable(self,uefi_var): def append_variable(self, uefi_var):
self.VarInfo.append(uefi_var) self.VarInfo.append(uefi_var)
def SetVpdRegionMaxSize(self,maxsize): def SetVpdRegionMaxSize(self, maxsize):
self.VpdRegionSize = maxsize self.VpdRegionSize = maxsize
def SetVpdRegionOffset(self,vpdoffset): def SetVpdRegionOffset(self, vpdoffset):
self.VpdRegionOffset = vpdoffset self.VpdRegionOffset = vpdoffset
def PatchNVStoreDefaultMaxSize(self,maxsize): def PatchNVStoreDefaultMaxSize(self, maxsize):
if not self.NVHeaderBuff: if not self.NVHeaderBuff:
return "" return ""
self.NVHeaderBuff = self.NVHeaderBuff[:8] + pack("=Q",maxsize) self.NVHeaderBuff = self.NVHeaderBuff[:8] + pack("=Q", maxsize)
default_var_bin = VariableMgr.format_data(self.NVHeaderBuff + self.VarDefaultBuff + self.VarDeltaBuff) default_var_bin = VariableMgr.format_data(self.NVHeaderBuff + self.VarDefaultBuff + self.VarDeltaBuff)
value_str = "{" value_str = "{"
default_var_bin_strip = [ data.strip("""'""") for data in default_var_bin] default_var_bin_strip = [ data.strip("""'""") for data in default_var_bin]
@ -59,9 +59,9 @@ class VariableMgr(object):
def combine_variable(self): def combine_variable(self):
indexedvarinfo = collections.OrderedDict() indexedvarinfo = collections.OrderedDict()
for item in self.VarInfo: for item in self.VarInfo:
if (item.skuname,item.defaultstoragename, item.var_name,item.var_guid) not in indexedvarinfo: if (item.skuname, item.defaultstoragename, item.var_name, item.var_guid) not in indexedvarinfo:
indexedvarinfo[(item.skuname,item.defaultstoragename, item.var_name,item.var_guid) ] = [] indexedvarinfo[(item.skuname, item.defaultstoragename, item.var_name, item.var_guid) ] = []
indexedvarinfo[(item.skuname,item.defaultstoragename, item.var_name,item.var_guid)].append(item) indexedvarinfo[(item.skuname, item.defaultstoragename, item.var_name, item.var_guid)].append(item)
for key in indexedvarinfo: for key in indexedvarinfo:
sku_var_info_offset_list = indexedvarinfo[key] sku_var_info_offset_list = indexedvarinfo[key]
if len(sku_var_info_offset_list) == 1: if len(sku_var_info_offset_list) == 1:
@ -74,15 +74,15 @@ class VariableMgr(object):
data_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[data_type]] data_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[data_type]]
data = value_list[0] data = value_list[0]
value_list = [] value_list = []
for data_byte in pack(data_flag,int(data,16) if data.upper().startswith('0X') else int(data)): for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):
value_list.append(hex(unpack("B",data_byte)[0])) value_list.append(hex(unpack("B", data_byte)[0]))
newvalue[int(item.var_offset,16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = value_list newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = value_list
try: try:
newvaluestr = "{" + ",".join(VariableMgr.assemble_variable(newvalue)) +"}" newvaluestr = "{" + ",".join(VariableMgr.assemble_variable(newvalue)) +"}"
except: except:
EdkLogger.error("build", AUTOGEN_ERROR, "Variable offset conflict in PCDs: %s \n" % (" and ".join(item.pcdname for item in sku_var_info_offset_list))) EdkLogger.error("build", AUTOGEN_ERROR, "Variable offset conflict in PCDs: %s \n" % (" and ".join(item.pcdname for item in sku_var_info_offset_list)))
n = sku_var_info_offset_list[0] n = sku_var_info_offset_list[0]
indexedvarinfo[key] = [var_info(n.pcdindex,n.pcdname,n.defaultstoragename,n.skuname,n.var_name, n.var_guid, "0x00",n.var_attribute,newvaluestr , newvaluestr , DataType.TAB_VOID)] indexedvarinfo[key] = [var_info(n.pcdindex, n.pcdname, n.defaultstoragename, n.skuname, n.var_name, n.var_guid, "0x00", n.var_attribute, newvaluestr, newvaluestr, DataType.TAB_VOID)]
self.VarInfo = [item[0] for item in indexedvarinfo.values()] self.VarInfo = [item[0] for item in indexedvarinfo.values()]
@staticmethod @staticmethod
@ -105,7 +105,7 @@ class VariableMgr(object):
for item in self.VarInfo: for item in self.VarInfo:
if item.pcdindex not in indexedvarinfo: if item.pcdindex not in indexedvarinfo:
indexedvarinfo[item.pcdindex] = dict() indexedvarinfo[item.pcdindex] = dict()
indexedvarinfo[item.pcdindex][(item.skuname,item.defaultstoragename)] = item indexedvarinfo[item.pcdindex][(item.skuname, item.defaultstoragename)] = item
for index in indexedvarinfo: for index in indexedvarinfo:
sku_var_info = indexedvarinfo[index] sku_var_info = indexedvarinfo[index]
@ -113,40 +113,40 @@ class VariableMgr(object):
default_data_buffer = "" default_data_buffer = ""
others_data_buffer = "" others_data_buffer = ""
tail = None tail = None
default_sku_default = indexedvarinfo[index].get((DataType.TAB_DEFAULT,DataType.TAB_DEFAULT_STORES_DEFAULT)) default_sku_default = indexedvarinfo[index].get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT))
if default_sku_default.data_type not in DataType.TAB_PCD_NUMERIC_TYPES: if default_sku_default.data_type not in DataType.TAB_PCD_NUMERIC_TYPES:
var_max_len = max(len(var_item.default_value.split(",")) for var_item in sku_var_info.values()) var_max_len = max(len(var_item.default_value.split(",")) for var_item in sku_var_info.values())
if len(default_sku_default.default_value.split(",")) < var_max_len: if len(default_sku_default.default_value.split(",")) < var_max_len:
tail = ",".join("0x00" for i in range(var_max_len-len(default_sku_default.default_value.split(",")))) tail = ",".join("0x00" for i in range(var_max_len-len(default_sku_default.default_value.split(","))))
default_data_buffer = VariableMgr.PACK_VARIABLES_DATA(default_sku_default.default_value,default_sku_default.data_type,tail) default_data_buffer = VariableMgr.PACK_VARIABLES_DATA(default_sku_default.default_value, default_sku_default.data_type, tail)
default_data_array = () default_data_array = ()
for item in default_data_buffer: for item in default_data_buffer:
default_data_array += unpack("B",item) default_data_array += unpack("B", item)
var_data[(DataType.TAB_DEFAULT,DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer,sku_var_info[(DataType.TAB_DEFAULT,DataType.TAB_DEFAULT_STORES_DEFAULT)]) var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])
for (skuid,defaultstoragename) in indexedvarinfo[index]: for (skuid, defaultstoragename) in indexedvarinfo[index]:
tail = None tail = None
if (skuid,defaultstoragename) == (DataType.TAB_DEFAULT,DataType.TAB_DEFAULT_STORES_DEFAULT): if (skuid, defaultstoragename) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
continue continue
other_sku_other = indexedvarinfo[index][(skuid,defaultstoragename)] other_sku_other = indexedvarinfo[index][(skuid, defaultstoragename)]
if default_sku_default.data_type not in DataType.TAB_PCD_NUMERIC_TYPES: if default_sku_default.data_type not in DataType.TAB_PCD_NUMERIC_TYPES:
if len(other_sku_other.default_value.split(",")) < var_max_len: if len(other_sku_other.default_value.split(",")) < var_max_len:
tail = ",".join("0x00" for i in range(var_max_len-len(other_sku_other.default_value.split(",")))) tail = ",".join("0x00" for i in range(var_max_len-len(other_sku_other.default_value.split(","))))
others_data_buffer = VariableMgr.PACK_VARIABLES_DATA(other_sku_other.default_value,other_sku_other.data_type,tail) others_data_buffer = VariableMgr.PACK_VARIABLES_DATA(other_sku_other.default_value, other_sku_other.data_type, tail)
others_data_array = () others_data_array = ()
for item in others_data_buffer: for item in others_data_buffer:
others_data_array += unpack("B",item) others_data_array += unpack("B", item)
data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array) data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)
var_data[(skuid,defaultstoragename)][index] = (data_delta,sku_var_info[(skuid,defaultstoragename)]) var_data[(skuid, defaultstoragename)][index] = (data_delta, sku_var_info[(skuid, defaultstoragename)])
return var_data return var_data
def new_process_varinfo(self): def new_process_varinfo(self):
@ -157,17 +157,17 @@ class VariableMgr(object):
if not var_data: if not var_data:
return [] return []
pcds_default_data = var_data.get((DataType.TAB_DEFAULT,DataType.TAB_DEFAULT_STORES_DEFAULT),{}) pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})
NvStoreDataBuffer = "" NvStoreDataBuffer = ""
var_data_offset = collections.OrderedDict() var_data_offset = collections.OrderedDict()
offset = NvStorageHeaderSize offset = NvStorageHeaderSize
for default_data,default_info in pcds_default_data.values(): for default_data, default_info in pcds_default_data.values():
var_name_buffer = VariableMgr.PACK_VARIABLE_NAME(default_info.var_name) var_name_buffer = VariableMgr.PACK_VARIABLE_NAME(default_info.var_name)
vendorguid = default_info.var_guid.split('-') vendorguid = default_info.var_guid.split('-')
if default_info.var_attribute: if default_info.var_attribute:
var_attr_value,_ = VariableAttributes.GetVarAttributes(default_info.var_attribute) var_attr_value, _ = VariableAttributes.GetVarAttributes(default_info.var_attribute)
else: else:
var_attr_value = 0x07 var_attr_value = 0x07
@ -186,22 +186,22 @@ class VariableMgr(object):
nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8) nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8)
data_delta_structure_buffer = "" data_delta_structure_buffer = ""
for skuname,defaultstore in var_data: for skuname, defaultstore in var_data:
if (skuname,defaultstore) == (DataType.TAB_DEFAULT,DataType.TAB_DEFAULT_STORES_DEFAULT): if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
continue continue
pcds_sku_data = var_data[(skuname,defaultstore)] pcds_sku_data = var_data[(skuname, defaultstore)]
delta_data_set = [] delta_data_set = []
for pcdindex in pcds_sku_data: for pcdindex in pcds_sku_data:
offset = var_data_offset[pcdindex] offset = var_data_offset[pcdindex]
delta_data,_ = pcds_sku_data[pcdindex] delta_data, _ = pcds_sku_data[pcdindex]
delta_data = [(item[0] + offset, item[1]) for item in delta_data] delta_data = [(item[0] + offset, item[1]) for item in delta_data]
delta_data_set.extend(delta_data) delta_data_set.extend(delta_data)
data_delta_structure_buffer += VariableMgr.AlignData(self.PACK_DELTA_DATA(skuname,defaultstore,delta_data_set), 8) data_delta_structure_buffer += VariableMgr.AlignData(self.PACK_DELTA_DATA(skuname, defaultstore, delta_data_set), 8)
size = len(nv_default_part + data_delta_structure_buffer) + 16 size = len(nv_default_part + data_delta_structure_buffer) + 16
maxsize = self.VpdRegionSize if self.VpdRegionSize else size maxsize = self.VpdRegionSize if self.VpdRegionSize else size
NV_Store_Default_Header = VariableMgr.PACK_NV_STORE_DEFAULT_HEADER(size,maxsize) NV_Store_Default_Header = VariableMgr.PACK_NV_STORE_DEFAULT_HEADER(size, maxsize)
self.NVHeaderBuff = NV_Store_Default_Header self.NVHeaderBuff = NV_Store_Default_Header
self.VarDefaultBuff =nv_default_part self.VarDefaultBuff =nv_default_part
@ -217,7 +217,7 @@ class VariableMgr(object):
def unpack_data(data): def unpack_data(data):
final_data = () final_data = ()
for item in data: for item in data:
final_data += unpack("B",item) final_data += unpack("B", item)
return final_data return final_data
@staticmethod @staticmethod
@ -227,7 +227,7 @@ class VariableMgr(object):
data_delta = [] data_delta = []
for i in range(len(default)): for i in range(len(default)):
if default[i] != theother[i]: if default[i] != theother[i]:
data_delta.append((i,theother[i])) data_delta.append((i, theother[i]))
return data_delta return data_delta
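calculate_delta above simply records each index where the two equally sized byte tuples differ, together with the differing byte from the non-default data. A worked example consistent with the code shown:

    default  = (0x11, 0x22, 0x33)
    theother = (0x11, 0x99, 0x33)
    # VariableMgr.calculate_delta(default, theother) -> [(1, 0x99)]  (only index 1 differs)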
def dump(self): def dump(self):
@ -248,36 +248,36 @@ class VariableMgr(object):
Guid = GuidStructureStringToGuidString(Guid) Guid = GuidStructureStringToGuidString(Guid)
GuidBuffer = PackGUID(Guid.split('-')) GuidBuffer = PackGUID(Guid.split('-'))
SizeBuffer = pack('=L',size) SizeBuffer = pack('=L', size)
FormatBuffer = pack('=B',0x5A) FormatBuffer = pack('=B', 0x5A)
StateBuffer = pack('=B',0xFE) StateBuffer = pack('=B', 0xFE)
reservedBuffer = pack('=H',0) reservedBuffer = pack('=H', 0)
reservedBuffer += pack('=L',0) reservedBuffer += pack('=L', 0)
return GuidBuffer + SizeBuffer + FormatBuffer + StateBuffer + reservedBuffer return GuidBuffer + SizeBuffer + FormatBuffer + StateBuffer + reservedBuffer
@staticmethod @staticmethod
def PACK_NV_STORE_DEFAULT_HEADER(size,maxsize): def PACK_NV_STORE_DEFAULT_HEADER(size, maxsize):
Signature = pack('=B',ord('N')) Signature = pack('=B', ord('N'))
Signature += pack("=B",ord('S')) Signature += pack("=B", ord('S'))
Signature += pack("=B",ord('D')) Signature += pack("=B", ord('D'))
Signature += pack("=B",ord('B')) Signature += pack("=B", ord('B'))
SizeBuffer = pack("=L",size) SizeBuffer = pack("=L", size)
MaxSizeBuffer = pack("=Q",maxsize) MaxSizeBuffer = pack("=Q", maxsize)
return Signature + SizeBuffer + MaxSizeBuffer return Signature + SizeBuffer + MaxSizeBuffer
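PACK_NV_STORE_DEFAULT_HEADER above emits the four signature bytes 'N', 'S', 'D', 'B' followed by a 4-byte size and an 8-byte maximum size. A small usage sketch consistent with the code shown (the argument values are arbitrary):

    hdr = VariableMgr.PACK_NV_STORE_DEFAULT_HEADER(0x30, 0x1000)
    # hdr[:4] == b'NSDB' and len(hdr) == 16  (4-byte signature + 4-byte size + 8-byte max size)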
@staticmethod @staticmethod
def PACK_VARIABLE_HEADER(attribute,namesize,datasize,vendorguid): def PACK_VARIABLE_HEADER(attribute, namesize, datasize, vendorguid):
Buffer = pack('=H',0x55AA) # pack StartID Buffer = pack('=H', 0x55AA) # pack StartID
Buffer += pack('=B',0x3F) # pack State Buffer += pack('=B', 0x3F) # pack State
Buffer += pack('=B',0) # pack reserved Buffer += pack('=B', 0) # pack reserved
Buffer += pack('=L',attribute) Buffer += pack('=L', attribute)
Buffer += pack('=L',namesize) Buffer += pack('=L', namesize)
Buffer += pack('=L',datasize) Buffer += pack('=L', datasize)
Buffer += PackGUID(vendorguid) Buffer += PackGUID(vendorguid)
@ -289,66 +289,66 @@ class VariableMgr(object):
data_len = 0 data_len = 0
if data_type == DataType.TAB_VOID: if data_type == DataType.TAB_VOID:
for value_char in var_value.strip("{").strip("}").split(","): for value_char in var_value.strip("{").strip("}").split(","):
Buffer += pack("=B",int(value_char,16)) Buffer += pack("=B", int(value_char, 16))
data_len += len(var_value.split(",")) data_len += len(var_value.split(","))
if tail: if tail:
for value_char in tail.split(","): for value_char in tail.split(","):
Buffer += pack("=B",int(value_char,16)) Buffer += pack("=B", int(value_char, 16))
data_len += len(tail.split(",")) data_len += len(tail.split(","))
elif data_type == "BOOLEAN": elif data_type == "BOOLEAN":
Buffer += pack("=B",True) if var_value.upper() == "TRUE" else pack("=B",False) Buffer += pack("=B", True) if var_value.upper() == "TRUE" else pack("=B", False)
data_len += 1 data_len += 1
elif data_type == DataType.TAB_UINT8: elif data_type == DataType.TAB_UINT8:
Buffer += pack("=B",GetIntegerValue(var_value)) Buffer += pack("=B", GetIntegerValue(var_value))
data_len += 1 data_len += 1
elif data_type == DataType.TAB_UINT16: elif data_type == DataType.TAB_UINT16:
Buffer += pack("=H",GetIntegerValue(var_value)) Buffer += pack("=H", GetIntegerValue(var_value))
data_len += 2 data_len += 2
elif data_type == DataType.TAB_UINT32: elif data_type == DataType.TAB_UINT32:
Buffer += pack("=L",GetIntegerValue(var_value)) Buffer += pack("=L", GetIntegerValue(var_value))
data_len += 4 data_len += 4
elif data_type == DataType.TAB_UINT64: elif data_type == DataType.TAB_UINT64:
Buffer += pack("=Q",GetIntegerValue(var_value)) Buffer += pack("=Q", GetIntegerValue(var_value))
data_len += 8 data_len += 8
return Buffer return Buffer
@staticmethod @staticmethod
def PACK_DEFAULT_DATA(defaultstoragename,skuid,var_value): def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):
Buffer = "" Buffer = ""
Buffer += pack("=L",4+8+8) Buffer += pack("=L", 4+8+8)
Buffer += pack("=Q",int(skuid)) Buffer += pack("=Q", int(skuid))
Buffer += pack("=Q",int(defaultstoragename)) Buffer += pack("=Q", int(defaultstoragename))
for item in var_value: for item in var_value:
Buffer += pack("=B",item) Buffer += pack("=B", item)
Buffer = pack("=L",len(Buffer)+4) + Buffer Buffer = pack("=L", len(Buffer)+4) + Buffer
return Buffer return Buffer
def GetSkuId(self,skuname): def GetSkuId(self, skuname):
if skuname not in self.SkuIdMap: if skuname not in self.SkuIdMap:
return None return None
return self.SkuIdMap.get(skuname)[0] return self.SkuIdMap.get(skuname)[0]
def GetDefaultStoreId(self,dname): def GetDefaultStoreId(self, dname):
if dname not in self.DefaultStoreMap: if dname not in self.DefaultStoreMap:
return None return None
return self.DefaultStoreMap.get(dname)[0] return self.DefaultStoreMap.get(dname)[0]
def PACK_DELTA_DATA(self,skuname,defaultstoragename,delta_list): def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):
skuid = self.GetSkuId(skuname) skuid = self.GetSkuId(skuname)
defaultstorageid = self.GetDefaultStoreId(defaultstoragename) defaultstorageid = self.GetDefaultStoreId(defaultstoragename)
Buffer = "" Buffer = ""
Buffer += pack("=L",4+8+8) Buffer += pack("=L", 4+8+8)
Buffer += pack("=Q",int(skuid)) Buffer += pack("=Q", int(skuid))
Buffer += pack("=Q",int(defaultstorageid)) Buffer += pack("=Q", int(defaultstorageid))
for (delta_offset,value) in delta_list: for (delta_offset, value) in delta_list:
Buffer += pack("=L",delta_offset) Buffer += pack("=L", delta_offset)
Buffer = Buffer[:-1] + pack("=B",value) Buffer = Buffer[:-1] + pack("=B", value)
Buffer = pack("=L",len(Buffer) + 4) + Buffer Buffer = pack("=L", len(Buffer) + 4) + Buffer
return Buffer return Buffer
@ -357,7 +357,7 @@ class VariableMgr(object):
mybuffer = data mybuffer = data
if (len(data) % align) > 0: if (len(data) % align) > 0:
for i in range(align - (len(data) % align)): for i in range(align - (len(data) % align)):
mybuffer += pack("=B",0) mybuffer += pack("=B", 0)
return mybuffer return mybuffer
@ -365,6 +365,6 @@ class VariableMgr(object):
def PACK_VARIABLE_NAME(var_name): def PACK_VARIABLE_NAME(var_name):
Buffer = "" Buffer = ""
for name_char in var_name.strip("{").strip("}").split(","): for name_char in var_name.strip("{").strip("}").split(","):
Buffer += pack("=B",int(name_char,16)) Buffer += pack("=B", int(name_char, 16))
return Buffer return Buffer
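PACK_VARIABLE_NAME above receives the variable name as a brace-wrapped, comma-separated list of hex byte values and packs it byte by byte. A usage sketch consistent with the method body (here the bytes happen to be the UCS-2 encoding of "Bo"):

    VariableMgr.PACK_VARIABLE_NAME("{0x42,0x00,0x6F,0x00}")
    # -> b'B\x00o\x00'  (a plain str under the Python 2 runtime these tools still run on)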


@ -94,7 +94,7 @@ PRINTABLE_LANGUAGE_NAME_STRING_NAME = '$PRINTABLE_LANGUAGE_NAME'
# @retval: The formatted hex string # @retval: The formatted hex string
# #
def DecToHexStr(Dec, Digit = 8): def DecToHexStr(Dec, Digit = 8):
return '0x{0:0{1}X}'.format(Dec,Digit) return '0x{0:0{1}X}'.format(Dec, Digit)
## Convert a dec number to a hex list ## Convert a dec number to a hex list
# #
@ -109,7 +109,7 @@ def DecToHexStr(Dec, Digit = 8):
# @retval: A list for formatted hex string # @retval: A list for formatted hex string
# #
def DecToHexList(Dec, Digit = 8): def DecToHexList(Dec, Digit = 8):
Hex = '{0:0{1}X}'.format(Dec,Digit) Hex = '{0:0{1}X}'.format(Dec, Digit)
return ["0x" + Hex[Bit:Bit + 2] for Bit in range(Digit - 2, -1, -2)] return ["0x" + Hex[Bit:Bit + 2] for Bit in range(Digit - 2, -1, -2)]
## Convert a acsii string to a hex list ## Convert a acsii string to a hex list


@ -349,7 +349,7 @@ class GenVPD :
# #
# Enhanced for support "|" character in the string. # Enhanced for support "|" character in the string.
# #
ValueList = ['', '', '', '',''] ValueList = ['', '', '', '', '']
ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$') ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')
PtrValue = ValueRe.findall(line) PtrValue = ValueRe.findall(line)
@ -399,7 +399,7 @@ class GenVPD :
count = 0 count = 0
for line in self.FileLinesList: for line in self.FileLinesList:
if line is not None : if line is not None :
PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4],line[5], self.InputFileName) PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4], line[5], self.InputFileName)
# Strip the space char # Strip the space char
PCD.PcdCName = PCD.PcdCName.strip(' ') PCD.PcdCName = PCD.PcdCName.strip(' ')
PCD.SkuId = PCD.SkuId.strip(' ') PCD.SkuId = PCD.SkuId.strip(' ')
@ -513,10 +513,10 @@ class GenVPD :
index =0 index =0
for pcd in self.PcdUnknownOffsetList: for pcd in self.PcdUnknownOffsetList:
index += 1 index += 1
if pcd.PcdCName == ".".join(("gEfiMdeModulePkgTokenSpaceGuid","PcdNvStoreDefaultValueBuffer")): if pcd.PcdCName == ".".join(("gEfiMdeModulePkgTokenSpaceGuid", "PcdNvStoreDefaultValueBuffer")):
if index != len(self.PcdUnknownOffsetList): if index != len(self.PcdUnknownOffsetList):
for i in range(len(self.PcdUnknownOffsetList) - index): for i in range(len(self.PcdUnknownOffsetList) - index):
self.PcdUnknownOffsetList[index+i -1 ] , self.PcdUnknownOffsetList[index+i] = self.PcdUnknownOffsetList[index+i] , self.PcdUnknownOffsetList[index+i -1] self.PcdUnknownOffsetList[index+i -1 ], self.PcdUnknownOffsetList[index+i] = self.PcdUnknownOffsetList[index+i], self.PcdUnknownOffsetList[index+i -1]
# #
# Process all Offset value are "*" # Process all Offset value are "*"
@ -597,7 +597,7 @@ class GenVPD :
eachUnfixedPcd.PcdOffset = str(hex(LastOffset)) eachUnfixedPcd.PcdOffset = str(hex(LastOffset))
eachUnfixedPcd.PcdBinOffset = LastOffset eachUnfixedPcd.PcdBinOffset = LastOffset
# Insert this pcd into fixed offset pcd list. # Insert this pcd into fixed offset pcd list.
self.PcdFixedOffsetSizeList.insert(FixOffsetSizeListCount,eachUnfixedPcd) self.PcdFixedOffsetSizeList.insert(FixOffsetSizeListCount, eachUnfixedPcd)
# Delete the item's offset that has been fixed and added into fixed offset list # Delete the item's offset that has been fixed and added into fixed offset list
self.PcdUnknownOffsetList.pop(countOfUnfixedList) self.PcdUnknownOffsetList.pop(countOfUnfixedList)
@ -685,7 +685,7 @@ class GenVPD :
for eachPcd in self.PcdFixedOffsetSizeList : for eachPcd in self.PcdFixedOffsetSizeList :
# write map file # write map file
try : try :
fMapFile.write("%s | %s | %s | %s | %s \n" % (eachPcd.PcdCName, eachPcd.SkuId,eachPcd.PcdOffset, eachPcd.PcdSize,eachPcd.PcdUnpackValue)) fMapFile.write("%s | %s | %s | %s | %s \n" % (eachPcd.PcdCName, eachPcd.SkuId, eachPcd.PcdOffset, eachPcd.PcdSize, eachPcd.PcdUnpackValue))
except: except:
EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None) EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)


@ -531,8 +531,8 @@ PCDS_DYNAMICEX_DEFAULT = "PcdsDynamicExDefault"
PCDS_DYNAMICEX_VPD = "PcdsDynamicExVpd" PCDS_DYNAMICEX_VPD = "PcdsDynamicExVpd"
PCDS_DYNAMICEX_HII = "PcdsDynamicExHii" PCDS_DYNAMICEX_HII = "PcdsDynamicExHii"
SECTIONS_HAVE_ITEM_PCD_SET = {PCDS_DYNAMIC_DEFAULT.upper(),PCDS_DYNAMIC_VPD.upper(),PCDS_DYNAMIC_HII.upper(), \ SECTIONS_HAVE_ITEM_PCD_SET = {PCDS_DYNAMIC_DEFAULT.upper(), PCDS_DYNAMIC_VPD.upper(), PCDS_DYNAMIC_HII.upper(), \
PCDS_DYNAMICEX_DEFAULT.upper(),PCDS_DYNAMICEX_VPD.upper(),PCDS_DYNAMICEX_HII.upper()} PCDS_DYNAMICEX_DEFAULT.upper(), PCDS_DYNAMICEX_VPD.upper(), PCDS_DYNAMICEX_HII.upper()}
# Section allowed to have items after arch # Section allowed to have items after arch
SECTIONS_HAVE_ITEM_AFTER_ARCH_SET = {TAB_LIBRARY_CLASSES.upper(), TAB_DEPEX.upper(), TAB_USER_EXTENSIONS.upper(), SECTIONS_HAVE_ITEM_AFTER_ARCH_SET = {TAB_LIBRARY_CLASSES.upper(), TAB_DEPEX.upper(), TAB_USER_EXTENSIONS.upper(),
PCDS_DYNAMIC_DEFAULT.upper(), PCDS_DYNAMIC_DEFAULT.upper(),


@ -569,7 +569,7 @@ class ValueExpression(BaseExpression):
IsArray = IsGuid = False IsArray = IsGuid = False
if len(Token.split(',')) == 11 and len(Token.split(',{')) == 2 \ if len(Token.split(',')) == 11 and len(Token.split(',{')) == 2 \
and len(Token.split('},')) == 1: and len(Token.split('},')) == 1:
HexLen = [11,6,6,5,4,4,4,4,4,4,6] HexLen = [11, 6, 6, 5, 4, 4, 4, 4, 4, 4, 6]
HexList= Token.split(',') HexList= Token.split(',')
if HexList[3].startswith('{') and \ if HexList[3].startswith('{') and \
not [Index for Index, Hex in enumerate(HexList) if len(Hex) > HexLen[Index]]: not [Index for Index, Hex in enumerate(HexList) if len(Hex) > HexLen[Index]]:
@ -765,7 +765,7 @@ class ValueExpression(BaseExpression):
# Parse operator # Parse operator
def _GetOperator(self): def _GetOperator(self):
self.__SkipWS() self.__SkipWS()
LegalOpLst = ['&&', '||', '!=', '==', '>=', '<='] + self.NonLetterOpLst + ['?',':'] LegalOpLst = ['&&', '||', '!=', '==', '>=', '<='] + self.NonLetterOpLst + ['?', ':']
self._Token = '' self._Token = ''
Expr = self._Expr[self._Idx:] Expr = self._Expr[self._Idx:]
@ -842,7 +842,7 @@ class ValueExpressionEx(ValueExpression):
elif Item.startswith(TAB_UINT64): elif Item.startswith(TAB_UINT64):
ItemSize = 8 ItemSize = 8
ValueType = TAB_UINT64 ValueType = TAB_UINT64
elif Item[0] in {'"',"'",'L'}: elif Item[0] in {'"', "'", 'L'}:
ItemSize = 0 ItemSize = 0
ValueType = TAB_VOID ValueType = TAB_VOID
else: else:
@ -946,7 +946,7 @@ class ValueExpressionEx(ValueExpression):
# replace each offset, except errors # replace each offset, except errors
for Offset in OffsetList: for Offset in OffsetList:
try: try:
Item = Item.replace('OFFSET_OF({})'.format(Offset),LabelDict[Offset]) Item = Item.replace('OFFSET_OF({})'.format(Offset), LabelDict[Offset])
except: except:
raise BadExpression('%s not defined' % Offset) raise BadExpression('%s not defined' % Offset)
@ -999,7 +999,7 @@ class ValueExpressionEx(ValueExpression):
Item = '0x%x' % TmpValue if type(TmpValue) != type('') else TmpValue Item = '0x%x' % TmpValue if type(TmpValue) != type('') else TmpValue
if ItemSize == 0: if ItemSize == 0:
ItemValue, ItemSize = ParseFieldValue(Item) ItemValue, ItemSize = ParseFieldValue(Item)
if Item[0] not in {'"','L','{'} and ItemSize > 1: if Item[0] not in {'"', 'L', '{'} and ItemSize > 1:
raise BadExpression("Byte array number %s should less than 0xFF." % Item) raise BadExpression("Byte array number %s should less than 0xFF." % Item)
else: else:
ItemValue = ParseFieldValue(Item)[0] ItemValue = ParseFieldValue(Item)[0]


@ -132,7 +132,7 @@ def _parseForGCC(lines, efifilepath, varnames):
if Str: if Str:
m = pcdPatternGcc.match(Str.strip()) m = pcdPatternGcc.match(Str.strip())
if m is not None: if m is not None:
varoffset.append((varname, int(m.groups(0)[0], 16) , int(sections[-1][1], 16), sections[-1][0])) varoffset.append((varname, int(m.groups(0)[0], 16), int(sections[-1][1], 16), sections[-1][0]))
if not varoffset: if not varoffset:
return [] return []
@ -1469,7 +1469,7 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''):
# Value, Size = ParseFieldValue(Value) # Value, Size = ParseFieldValue(Value)
if Size: if Size:
try: try:
int(Size,16) if Size.upper().startswith("0X") else int(Size) int(Size, 16) if Size.upper().startswith("0X") else int(Size)
except: except:
IsValid = False IsValid = False
Size = -1 Size = -1
@ -1490,7 +1490,7 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''):
if Size: if Size:
try: try:
int(Size,16) if Size.upper().startswith("0X") else int(Size) int(Size, 16) if Size.upper().startswith("0X") else int(Size)
except: except:
IsValid = False IsValid = False
Size = -1 Size = -1
@ -1512,7 +1512,7 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''):
IsValid = (len(FieldList) <= 3) IsValid = (len(FieldList) <= 3)
if Size: if Size:
try: try:
int(Size,16) if Size.upper().startswith("0X") else int(Size) int(Size, 16) if Size.upper().startswith("0X") else int(Size)
except: except:
IsValid = False IsValid = False
Size = -1 Size = -1
@ -1670,7 +1670,7 @@ def ConvertStringToByteArray(Value):
Value = eval(Value) # translate escape character Value = eval(Value) # translate escape character
NewValue = '{' NewValue = '{'
for Index in range(0,len(Value)): for Index in range(0, len(Value)):
if Unicode: if Unicode:
NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ',' NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
else: else:
@ -1914,28 +1914,28 @@ class PeImageClass():
return Value return Value
class DefaultStore(): class DefaultStore():
def __init__(self,DefaultStores ): def __init__(self, DefaultStores ):
self.DefaultStores = DefaultStores self.DefaultStores = DefaultStores
def DefaultStoreID(self,DefaultStoreName): def DefaultStoreID(self, DefaultStoreName):
for key,value in self.DefaultStores.items(): for key, value in self.DefaultStores.items():
if value == DefaultStoreName: if value == DefaultStoreName:
return key return key
return None return None
def GetDefaultDefault(self): def GetDefaultDefault(self):
if not self.DefaultStores or "0" in self.DefaultStores: if not self.DefaultStores or "0" in self.DefaultStores:
return "0",TAB_DEFAULT_STORES_DEFAULT return "0", TAB_DEFAULT_STORES_DEFAULT
else: else:
minvalue = min(int(value_str) for value_str in self.DefaultStores) minvalue = min(int(value_str) for value_str in self.DefaultStores)
return (str(minvalue), self.DefaultStores[str(minvalue)]) return (str(minvalue), self.DefaultStores[str(minvalue)])
def GetMin(self,DefaultSIdList): def GetMin(self, DefaultSIdList):
if not DefaultSIdList: if not DefaultSIdList:
return TAB_DEFAULT_STORES_DEFAULT return TAB_DEFAULT_STORES_DEFAULT
storeidset = {storeid for storeid, storename in self.DefaultStores.values() if storename in DefaultSIdList} storeidset = {storeid for storeid, storename in self.DefaultStores.values() if storename in DefaultSIdList}
if not storeidset: if not storeidset:
return "" return ""
minid = min(storeidset ) minid = min(storeidset )
for sid,name in self.DefaultStores.values(): for sid, name in self.DefaultStores.values():
if sid == minid: if sid == minid:
return name return name
class SkuClass(): class SkuClass():
@ -1950,7 +1950,7 @@ class SkuClass():
for SkuName in SkuIds: for SkuName in SkuIds:
SkuId = SkuIds[SkuName][0] SkuId = SkuIds[SkuName][0]
skuid_num = int(SkuId,16) if SkuId.upper().startswith("0X") else int(SkuId) skuid_num = int(SkuId, 16) if SkuId.upper().startswith("0X") else int(SkuId)
if skuid_num > 0xFFFFFFFFFFFFFFFF: if skuid_num > 0xFFFFFFFFFFFFFFFF:
EdkLogger.error("build", PARAMETER_INVALID, EdkLogger.error("build", PARAMETER_INVALID,
ExtraData = "SKU-ID [%s] value %s exceeds the max value of UINT64" ExtraData = "SKU-ID [%s] value %s exceeds the max value of UINT64"
@ -2003,9 +2003,9 @@ class SkuClass():
self.__SkuInherit = {} self.__SkuInherit = {}
for item in self.SkuData.values(): for item in self.SkuData.values():
self.__SkuInherit[item[1]]=item[2] if item[2] else "DEFAULT" self.__SkuInherit[item[1]]=item[2] if item[2] else "DEFAULT"
return self.__SkuInherit.get(skuname,"DEFAULT") return self.__SkuInherit.get(skuname, "DEFAULT")
def GetSkuChain(self,sku): def GetSkuChain(self, sku):
if sku == "DEFAULT": if sku == "DEFAULT":
return ["DEFAULT"] return ["DEFAULT"]
skulist = [sku] skulist = [sku]


@ -17,7 +17,7 @@ from Common.GlobalData import *
from CommonDataClass.Exceptions import BadExpression from CommonDataClass.Exceptions import BadExpression
from CommonDataClass.Exceptions import WrnExpression from CommonDataClass.Exceptions import WrnExpression
import uuid import uuid
from Common.Expression import PcdPattern,BaseExpression from Common.Expression import PcdPattern, BaseExpression
from Common.DataType import * from Common.DataType import *
ERR_STRING_EXPR = 'This operator cannot be used in string expression: [%s].' ERR_STRING_EXPR = 'This operator cannot be used in string expression: [%s].'
@ -167,7 +167,7 @@ class EQOperatorObject(object):
raise BadExpression(ERR_SNYTAX % Expr) raise BadExpression(ERR_SNYTAX % Expr)
rangeId1 = str(uuid.uuid1()) rangeId1 = str(uuid.uuid1())
rangeContainer = RangeContainer() rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(int(Operand) , int(Operand))) rangeContainer.push(RangeObject(int(Operand), int(Operand)))
SymbolTable[rangeId1] = rangeContainer SymbolTable[rangeId1] = rangeContainer
return rangeId1 return rangeId1
@ -453,7 +453,7 @@ class RangeExpression(BaseExpression):
# [!]*A # [!]*A
def _RelExpr(self): def _RelExpr(self):
if self._IsOperator({"NOT" , "LE", "GE", "LT", "GT", "EQ", "XOR"}): if self._IsOperator({"NOT", "LE", "GE", "LT", "GT", "EQ", "XOR"}):
Token = self._Token Token = self._Token
Val = self._NeExpr() Val = self._NeExpr()
try: try:


@ -750,7 +750,7 @@ def SplitString(String):
# @param StringList: A list for strings to be converted # @param StringList: A list for strings to be converted
# #
def ConvertToSqlString(StringList): def ConvertToSqlString(StringList):
return map(lambda s: s.replace("'", "''") , StringList) return map(lambda s: s.replace("'", "''"), StringList)
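ConvertToSqlString just doubles embedded single quotes so the strings can be placed inside SQL string literals. A hedged usage example (on Python 2 map() returns a list; on Python 3 it would be a lazy map object):

    ConvertToSqlString(["it's", "plain"])
    # -> ["it''s", "plain"]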
## Convert To Sql String ## Convert To Sql String
# #


@ -26,9 +26,9 @@ from Common.StringUtils import NormPath
import Common.GlobalData as GlobalData import Common.GlobalData as GlobalData
from Common import GlobalData from Common import GlobalData
from Common.MultipleWorkspace import MultipleWorkspace as mws from Common.MultipleWorkspace import MultipleWorkspace as mws
from DataType import TAB_TOD_DEFINES_TARGET,TAB_TOD_DEFINES_TOOL_CHAIN_TAG,\ from DataType import TAB_TOD_DEFINES_TARGET, TAB_TOD_DEFINES_TOOL_CHAIN_TAG,\
TAB_TOD_DEFINES_TARGET_ARCH,TAB_TOD_DEFINES_COMMAND_TYPE\ TAB_TOD_DEFINES_TARGET_ARCH, TAB_TOD_DEFINES_COMMAND_TYPE\
,TAB_TOD_DEFINES_FAMILY,TAB_TOD_DEFINES_BUILDRULEFAMILY , TAB_TOD_DEFINES_FAMILY, TAB_TOD_DEFINES_BUILDRULEFAMILY
## ##


@ -88,7 +88,7 @@ class VpdInfoFile:
# #
# @param offset integer value for VPD's offset in specific SKU. # @param offset integer value for VPD's offset in specific SKU.
# #
def Add(self, Vpd, skuname,Offset): def Add(self, Vpd, skuname, Offset):
if (Vpd is None): if (Vpd is None):
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.") EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
@ -140,7 +140,7 @@ class VpdInfoFile:
if PcdValue == "" : if PcdValue == "" :
PcdValue = Pcd.DefaultValue PcdValue = Pcd.DefaultValue
Content += "%s.%s|%s|%s|%s|%s \n" % (Pcd.TokenSpaceGuidCName, PcdTokenCName, skuname,str(self._VpdArray[Pcd][skuname]).strip(), str(Pcd.MaxDatumSize).strip(),PcdValue) Content += "%s.%s|%s|%s|%s|%s \n" % (Pcd.TokenSpaceGuidCName, PcdTokenCName, skuname, str(self._VpdArray[Pcd][skuname]).strip(), str(Pcd.MaxDatumSize).strip(), PcdValue)
i += 1 i += 1
return SaveFileOnChange(FilePath, Content, False) return SaveFileOnChange(FilePath, Content, False)
@ -169,8 +169,8 @@ class VpdInfoFile:
# the line must follow output format defined in BPDG spec. # the line must follow output format defined in BPDG spec.
# #
try: try:
PcdName, SkuId,Offset, Size, Value = Line.split("#")[0].split("|") PcdName, SkuId, Offset, Size, Value = Line.split("#")[0].split("|")
PcdName, SkuId,Offset, Size, Value = PcdName.strip(), SkuId.strip(),Offset.strip(), Size.strip(), Value.strip() PcdName, SkuId, Offset, Size, Value = PcdName.strip(), SkuId.strip(), Offset.strip(), Size.strip(), Value.strip()
TokenSpaceName, PcdTokenName = PcdName.split(".") TokenSpaceName, PcdTokenName = PcdName.split(".")
except: except:
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath) EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath)
@ -179,7 +179,7 @@ class VpdInfoFile:
if (TokenSpaceName, PcdTokenName) not in self._VpdInfo: if (TokenSpaceName, PcdTokenName) not in self._VpdInfo:
self._VpdInfo[(TokenSpaceName, PcdTokenName)] = [] self._VpdInfo[(TokenSpaceName, PcdTokenName)] = []
self._VpdInfo[(TokenSpaceName, PcdTokenName)].append((SkuId,Offset, Value)) self._VpdInfo[(TokenSpaceName, PcdTokenName)].append((SkuId, Offset, Value))
for VpdObject in self._VpdArray: for VpdObject in self._VpdArray:
VpdObjectTokenCName = VpdObject.TokenCName VpdObjectTokenCName = VpdObject.TokenCName
for PcdItem in GlobalData.MixedPcd: for PcdItem in GlobalData.MixedPcd:


@ -785,10 +785,10 @@ class CParser(Parser):
if self.backtracking == 0: if self.backtracking == 0:
if d is not None: if d is not None:
self.function_definition_stack[-1].ModifierText = self.input.toString(d.start,d.stop) self.function_definition_stack[-1].ModifierText = self.input.toString(d.start, d.stop)
else: else:
self.function_definition_stack[-1].ModifierText = '' self.function_definition_stack[-1].ModifierText = ''
self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start,declarator1.stop) self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start, declarator1.stop)
self.function_definition_stack[-1].DeclLine = declarator1.start.line self.function_definition_stack[-1].DeclLine = declarator1.start.line
self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
if a is not None: if a is not None:
@ -922,9 +922,9 @@ class CParser(Parser):
if self.backtracking == 0: if self.backtracking == 0:
if b is not None: if b is not None:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start,b.stop), self.input.toString(c.start,c.stop)) self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start, b.stop), self.input.toString(c.start, c.stop))
else: else:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start,c.stop)) self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start, c.stop))
@ -959,7 +959,7 @@ class CParser(Parser):
if self.backtracking == 0: if self.backtracking == 0:
if t is not None: if t is not None:
self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start,s.stop), self.input.toString(t.start,t.stop)) self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start, s.stop), self.input.toString(t.start, t.stop))
@ -1403,7 +1403,7 @@ class CParser(Parser):
if self.backtracking == 0: if self.backtracking == 0:
if s.stop is not None: if s.stop is not None:
self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start,s.stop)) self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start, s.stop))
@ -1418,7 +1418,7 @@ class CParser(Parser):
if self.backtracking == 0: if self.backtracking == 0:
if e.stop is not None: if e.stop is not None:
self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@ -5401,7 +5401,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.postfix_expression_stack[-1].FuncCallText += self.input.toString(p.start,p.stop) self.postfix_expression_stack[-1].FuncCallText += self.input.toString(p.start, p.stop)
# C.g:407:9: ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* # C.g:407:9: ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
while True: #loop65 while True: #loop65
@ -5501,7 +5501,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, b.line, b.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, self.input.toString(c.start,c.stop)) self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, b.line, b.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, self.input.toString(c.start, c.stop))
@ -8277,7 +8277,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@ -16384,7 +16384,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
self.following.append(self.FOLLOW_statement_in_selection_statement2284) self.following.append(self.FOLLOW_statement_in_selection_statement2284)
self.statement() self.statement()
@ -16503,7 +16503,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@ -16535,7 +16535,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@ -16582,7 +16582,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))


@ -561,7 +561,7 @@ class InfParser(MetaFileParser):
NmakeLine = '' NmakeLine = ''
# section content # section content
self._ValueList = ['','',''] self._ValueList = ['', '', '']
# parse current line, result will be put in self._ValueList # parse current line, result will be put in self._ValueList
self._SectionParser[self._SectionType](self) self._SectionParser[self._SectionType](self)
if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE: if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
@ -920,7 +920,7 @@ class DscParser(MetaFileParser):
## Directive statement parser ## Directive statement parser
def _DirectiveParser(self): def _DirectiveParser(self):
self._ValueList = ['','',''] self._ValueList = ['', '', '']
TokenList = GetSplitValueList(self._CurrentLine, ' ', 1) TokenList = GetSplitValueList(self._CurrentLine, ' ', 1)
self._ValueList[0:len(TokenList)] = TokenList self._ValueList[0:len(TokenList)] = TokenList
@ -1110,7 +1110,7 @@ class DscParser(MetaFileParser):
## Override parent's method since we'll do all macro replacements in parser ## Override parent's method since we'll do all macro replacements in parser
def _GetMacros(self): def _GetMacros(self):
Macros = dict( [('ARCH','IA32'), ('FAMILY','MSFT'),('TOOL_CHAIN_TAG','VS2008x86'),('TARGET','DEBUG')]) Macros = dict( [('ARCH', 'IA32'), ('FAMILY', 'MSFT'), ('TOOL_CHAIN_TAG', 'VS2008x86'), ('TARGET', 'DEBUG')])
Macros.update(self._FileLocalMacros) Macros.update(self._FileLocalMacros)
Macros.update(self._GetApplicableSectionMacro()) Macros.update(self._GetApplicableSectionMacro())
Macros.update(GlobalData.gEdkGlobal) Macros.update(GlobalData.gEdkGlobal)
@ -1225,7 +1225,7 @@ class DscParser(MetaFileParser):
self._RawTable.Drop() self._RawTable.Drop()
self._Table.Drop() self._Table.Drop()
for Record in RecordList: for Record in RecordList:
EccGlobalData.gDb.TblDsc.Insert(Record[1],Record[2],Record[3],Record[4],Record[5],Record[6],Record[7],Record[8],Record[9],Record[10],Record[11],Record[12],Record[13],Record[14]) EccGlobalData.gDb.TblDsc.Insert(Record[1], Record[2], Record[3], Record[4], Record[5], Record[6], Record[7], Record[8], Record[9], Record[10], Record[11], Record[12], Record[13], Record[14])
GlobalData.gPlatformDefines.update(self._FileLocalMacros) GlobalData.gPlatformDefines.update(self._FileLocalMacros)
self._PostProcessed = True self._PostProcessed = True
self._Content = None self._Content = None
@ -1246,7 +1246,7 @@ class DscParser(MetaFileParser):
def __RetrievePcdValue(self): def __RetrievePcdValue(self):
Records = self._RawTable.Query(MODEL_PCD_FEATURE_FLAG, BelongsToItem=-1.0) Records = self._RawTable.Query(MODEL_PCD_FEATURE_FLAG, BelongsToItem=-1.0)
for TokenSpaceGuid,PcdName,Value,Dummy2,Dummy3,ID,Line in Records: for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, ID, Line in Records:
Value, DatumType, MaxDatumSize = AnalyzePcdData(Value) Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
# Only use PCD whose value is straitforward (no macro and PCD) # Only use PCD whose value is straitforward (no macro and PCD)
if self.SymbolPattern.findall(Value): if self.SymbolPattern.findall(Value):
@ -1259,7 +1259,7 @@ class DscParser(MetaFileParser):
self._Symbols[Name] = Value self._Symbols[Name] = Value
Records = self._RawTable.Query(MODEL_PCD_FIXED_AT_BUILD, BelongsToItem=-1.0) Records = self._RawTable.Query(MODEL_PCD_FIXED_AT_BUILD, BelongsToItem=-1.0)
for TokenSpaceGuid,PcdName,Value,Dummy2,Dummy3,ID,Line in Records: for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, ID, Line in Records:
Value, DatumType, MaxDatumSize = AnalyzePcdData(Value) Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
# Only use PCD whose value is straitforward (no macro and PCD) # Only use PCD whose value is straitforward (no macro and PCD)
if self.SymbolPattern.findall(Value): if self.SymbolPattern.findall(Value):
@ -1571,7 +1571,7 @@ class DecParser(MetaFileParser):
continue continue
# section content # section content
self._ValueList = ['','',''] self._ValueList = ['', '', '']
self._SectionParser[self._SectionType[0]](self) self._SectionParser[self._SectionType[0]](self)
if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE: if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
self._ItemType = -1 self._ItemType = -1
@ -1717,7 +1717,7 @@ class DecParser(MetaFileParser):
GuidValue = GuidValue.lstrip(' {') GuidValue = GuidValue.lstrip(' {')
HexList.append('0x' + str(GuidValue[2:])) HexList.append('0x' + str(GuidValue[2:]))
Index += 1 Index += 1
self._ValueList[1] = "{ %s, %s, %s, { %s, %s, %s, %s, %s, %s, %s, %s }}" % (HexList[0], HexList[1], HexList[2],HexList[3],HexList[4],HexList[5],HexList[6],HexList[7],HexList[8],HexList[9],HexList[10]) self._ValueList[1] = "{ %s, %s, %s, { %s, %s, %s, %s, %s, %s, %s, %s }}" % (HexList[0], HexList[1], HexList[2], HexList[3], HexList[4], HexList[5], HexList[6], HexList[7], HexList[8], HexList[9], HexList[10])
else: else:
EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format", EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format",
ExtraData=self._CurrentLine + \ ExtraData=self._CurrentLine + \


@ -785,10 +785,10 @@ class CParser(Parser):
if self.backtracking == 0: if self.backtracking == 0:
if d is not None: if d is not None:
self.function_definition_stack[-1].ModifierText = self.input.toString(d.start,d.stop) self.function_definition_stack[-1].ModifierText = self.input.toString(d.start, d.stop)
else: else:
self.function_definition_stack[-1].ModifierText = '' self.function_definition_stack[-1].ModifierText = ''
self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start,declarator1.stop) self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start, declarator1.stop)
self.function_definition_stack[-1].DeclLine = declarator1.start.line self.function_definition_stack[-1].DeclLine = declarator1.start.line
self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
if a is not None: if a is not None:
@ -922,9 +922,9 @@ class CParser(Parser):
if self.backtracking == 0: if self.backtracking == 0:
if b is not None: if b is not None:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start,b.stop), self.input.toString(c.start,c.stop)) self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start, b.stop), self.input.toString(c.start, c.stop))
else: else:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start,c.stop)) self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start, c.stop))
@ -959,7 +959,7 @@ class CParser(Parser):
if self.backtracking == 0: if self.backtracking == 0:
if t is not None: if t is not None:
self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start,s.stop), self.input.toString(t.start,t.stop)) self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start, s.stop), self.input.toString(t.start, t.stop))
@ -1403,7 +1403,7 @@ class CParser(Parser):
if self.backtracking == 0: if self.backtracking == 0:
if s.stop is not None: if s.stop is not None:
self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start,s.stop)) self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start, s.stop))
@ -1418,7 +1418,7 @@ class CParser(Parser):
if self.backtracking == 0: if self.backtracking == 0:
if e.stop is not None: if e.stop is not None:
self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@ -5401,7 +5401,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.postfix_expression_stack[-1].FuncCallText += self.input.toString(p.start,p.stop) self.postfix_expression_stack[-1].FuncCallText += self.input.toString(p.start, p.stop)
# C.g:407:9: ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* # C.g:407:9: ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
while True: #loop65 while True: #loop65
@ -5501,7 +5501,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, b.line, b.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, self.input.toString(c.start,c.stop)) self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, b.line, b.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, self.input.toString(c.start, c.stop))
@ -8277,7 +8277,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@ -16384,7 +16384,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
self.following.append(self.FOLLOW_statement_in_selection_statement2284) self.following.append(self.FOLLOW_statement_in_selection_statement2284)
self.statement() self.statement()
@ -16503,7 +16503,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@ -16535,7 +16535,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
@ -16582,7 +16582,7 @@ class CParser(Parser):
if self.failed: if self.failed:
return return
if self.backtracking == 0: if self.backtracking == 0:
self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))


@ -128,11 +128,11 @@ def GetIdentifierList():
for pp in FileProfile.PPDirectiveList: for pp in FileProfile.PPDirectiveList:
Type = GetIdType(pp.Content) Type = GetIdType(pp.Content)
IdPP = DataClass.IdentifierClass(-1, '', '', '', pp.Content, Type, -1, -1, pp.StartPos[0],pp.StartPos[1],pp.EndPos[0],pp.EndPos[1]) IdPP = DataClass.IdentifierClass(-1, '', '', '', pp.Content, Type, -1, -1, pp.StartPos[0], pp.StartPos[1], pp.EndPos[0], pp.EndPos[1])
IdList.append(IdPP) IdList.append(IdPP)
for ae in FileProfile.AssignmentExpressionList: for ae in FileProfile.AssignmentExpressionList:
IdAE = DataClass.IdentifierClass(-1, ae.Operator, '', ae.Name, ae.Value, DataClass.MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION, -1, -1, ae.StartPos[0],ae.StartPos[1],ae.EndPos[0],ae.EndPos[1]) IdAE = DataClass.IdentifierClass(-1, ae.Operator, '', ae.Name, ae.Value, DataClass.MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION, -1, -1, ae.StartPos[0], ae.StartPos[1], ae.EndPos[0], ae.EndPos[1])
IdList.append(IdAE) IdList.append(IdAE)
FuncDeclPattern = GetFuncDeclPattern() FuncDeclPattern = GetFuncDeclPattern()
@ -154,7 +154,7 @@ def GetIdentifierList():
var.Modifier += ' ' + FuncNamePartList[Index] var.Modifier += ' ' + FuncNamePartList[Index]
var.Declarator = var.Declarator.lstrip().lstrip(FuncNamePartList[Index]) var.Declarator = var.Declarator.lstrip().lstrip(FuncNamePartList[Index])
Index += 1 Index += 1
IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', var.Declarator, '', DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, -1, -1, var.StartPos[0],var.StartPos[1],var.EndPos[0],var.EndPos[1]) IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', var.Declarator, '', DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, -1, -1, var.StartPos[0], var.StartPos[1], var.EndPos[0], var.EndPos[1])
IdList.append(IdVar) IdList.append(IdVar)
continue continue
@ -167,7 +167,7 @@ def GetIdentifierList():
var.Modifier += ' ' + Name[LSBPos:] var.Modifier += ' ' + Name[LSBPos:]
Name = Name[0:LSBPos] Name = Name[0:LSBPos]
IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]]or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0],var.StartPos[1],var.EndPos[0],var.EndPos[1]) IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]]or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0], var.StartPos[1], var.EndPos[0], var.EndPos[1])
IdList.append(IdVar) IdList.append(IdVar)
else: else:
DeclList = var.Declarator.split('=') DeclList = var.Declarator.split('=')
@ -176,7 +176,7 @@ def GetIdentifierList():
LSBPos = var.Declarator.find('[') LSBPos = var.Declarator.find('[')
var.Modifier += ' ' + Name[LSBPos:] var.Modifier += ' ' + Name[LSBPos:]
Name = Name[0:LSBPos] Name = Name[0:LSBPos]
IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]]or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0],var.StartPos[1],var.EndPos[0],var.EndPos[1]) IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]]or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0], var.StartPos[1], var.EndPos[0], var.EndPos[1])
IdList.append(IdVar) IdList.append(IdVar)
for enum in FileProfile.EnumerationDefinitionList: for enum in FileProfile.EnumerationDefinitionList:
@ -184,7 +184,7 @@ def GetIdentifierList():
RBPos = enum.Content.find('}') RBPos = enum.Content.find('}')
Name = enum.Content[4:LBPos].strip() Name = enum.Content[4:LBPos].strip()
Value = enum.Content[LBPos+1:RBPos] Value = enum.Content[LBPos+1:RBPos]
IdEnum = DataClass.IdentifierClass(-1, '', '', Name, Value, DataClass.MODEL_IDENTIFIER_ENUMERATE, -1, -1, enum.StartPos[0],enum.StartPos[1],enum.EndPos[0],enum.EndPos[1]) IdEnum = DataClass.IdentifierClass(-1, '', '', Name, Value, DataClass.MODEL_IDENTIFIER_ENUMERATE, -1, -1, enum.StartPos[0], enum.StartPos[1], enum.EndPos[0], enum.EndPos[1])
IdList.append(IdEnum) IdList.append(IdEnum)
for su in FileProfile.StructUnionDefinitionList: for su in FileProfile.StructUnionDefinitionList:
@ -201,7 +201,7 @@ def GetIdentifierList():
else: else:
Name = su.Content[SkipLen:LBPos].strip() Name = su.Content[SkipLen:LBPos].strip()
Value = su.Content[LBPos+1:RBPos] Value = su.Content[LBPos+1:RBPos]
IdPE = DataClass.IdentifierClass(-1, '', '', Name, Value, Type, -1, -1, su.StartPos[0],su.StartPos[1],su.EndPos[0],su.EndPos[1]) IdPE = DataClass.IdentifierClass(-1, '', '', Name, Value, Type, -1, -1, su.StartPos[0], su.StartPos[1], su.EndPos[0], su.EndPos[1])
IdList.append(IdPE) IdList.append(IdPE)
TdFuncPointerPattern = GetTypedefFuncPointerPattern() TdFuncPointerPattern = GetTypedefFuncPointerPattern()
@ -224,11 +224,11 @@ def GetIdentifierList():
Name = TmpStr[0:RBPos] Name = TmpStr[0:RBPos]
Value = 'FP' + TmpStr[RBPos + 1:] Value = 'FP' + TmpStr[RBPos + 1:]
IdTd = DataClass.IdentifierClass(-1, Modifier, '', Name, Value, DataClass.MODEL_IDENTIFIER_TYPEDEF, -1, -1, td.StartPos[0],td.StartPos[1],td.EndPos[0],td.EndPos[1]) IdTd = DataClass.IdentifierClass(-1, Modifier, '', Name, Value, DataClass.MODEL_IDENTIFIER_TYPEDEF, -1, -1, td.StartPos[0], td.StartPos[1], td.EndPos[0], td.EndPos[1])
IdList.append(IdTd) IdList.append(IdTd)
for funcCall in FileProfile.FunctionCallingList: for funcCall in FileProfile.FunctionCallingList:
IdFC = DataClass.IdentifierClass(-1, '', '', funcCall.FuncName, funcCall.ParamList, DataClass.MODEL_IDENTIFIER_FUNCTION_CALLING, -1, -1, funcCall.StartPos[0],funcCall.StartPos[1],funcCall.EndPos[0],funcCall.EndPos[1]) IdFC = DataClass.IdentifierClass(-1, '', '', funcCall.FuncName, funcCall.ParamList, DataClass.MODEL_IDENTIFIER_FUNCTION_CALLING, -1, -1, funcCall.StartPos[0], funcCall.StartPos[1], funcCall.EndPos[0], funcCall.EndPos[1])
IdList.append(IdFC) IdList.append(IdFC)
return IdList return IdList
@ -330,7 +330,7 @@ def GetFunctionList():
FuncDef.Modifier += ' ' + FuncNamePartList[Index] FuncDef.Modifier += ' ' + FuncNamePartList[Index]
Index += 1 Index += 1
FuncObj = DataClass.FunctionClass(-1, FuncDef.Declarator, FuncDef.Modifier, FuncName.strip(), '', FuncDef.StartPos[0],FuncDef.StartPos[1],FuncDef.EndPos[0],FuncDef.EndPos[1], FuncDef.LeftBracePos[0], FuncDef.LeftBracePos[1], -1, ParamIdList, []) FuncObj = DataClass.FunctionClass(-1, FuncDef.Declarator, FuncDef.Modifier, FuncName.strip(), '', FuncDef.StartPos[0], FuncDef.StartPos[1], FuncDef.EndPos[0], FuncDef.EndPos[1], FuncDef.LeftBracePos[0], FuncDef.LeftBracePos[1], -1, ParamIdList, [])
FuncObjList.append(FuncObj) FuncObjList.append(FuncObj)
return FuncObjList return FuncObjList


@ -22,7 +22,7 @@ import FfsFileStatement
from GenFdsGlobalVariable import GenFdsGlobalVariable from GenFdsGlobalVariable import GenFdsGlobalVariable
from CommonDataClass.FdfClass import AprioriSectionClassObject from CommonDataClass.FdfClass import AprioriSectionClassObject
from Common.StringUtils import * from Common.StringUtils import *
from Common.Misc import SaveFileOnChange,PathClass from Common.Misc import SaveFileOnChange, PathClass
from Common import EdkLogger from Common import EdkLogger
from Common.BuildToolError import * from Common.BuildToolError import *
from Common.DataType import TAB_COMMON from Common.DataType import TAB_COMMON


@ -207,7 +207,7 @@ class CapsulePayload(CapsuleData):
# #
Guid = self.ImageTypeId.split('-') Guid = self.ImageTypeId.split('-')
Buffer = pack('=ILHHBBBBBBBBBBBBIIQ', Buffer = pack('=ILHHBBBBBBBBBBBBIIQ',
int(self.Version,16), int(self.Version, 16),
int(Guid[0], 16), int(Guid[0], 16),
int(Guid[1], 16), int(Guid[1], 16),
int(Guid[2], 16), int(Guid[2], 16),
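The Buffer assembly above packs the capsule payload header as a 32-bit version followed by the image type GUID, converting each textual field with int(x, 16). A reduced sketch of that packing under the same '=' (native order, standard sizes) convention; the name pack_version_and_guid and the shortened field list are assumptions of this example, and the real header carries further fields after the GUID:

import struct

def pack_version_and_guid(version_str, guid_str):
    # Pack a 32-bit version followed by a GUID in its 4-2-2-8 byte layout.
    guid = guid_str.split('-')
    data4 = [int((guid[3] + guid[4])[i:i + 2], 16) for i in range(0, 16, 2)]
    return struct.pack('=ILHHBBBBBBBB',
                       int(version_str, 16),
                       int(guid[0], 16),
                       int(guid[1], 16),
                       int(guid[2], 16),
                       *data4)

# pack_version_and_guid('0x00000002', '12345678-1234-5678-9abc-def012345678')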


@ -133,7 +133,7 @@ class EfiSection (EfiSectionClassObject):
elif FileList != []: elif FileList != []:
for File in FileList: for File in FileList:
Index = Index + 1 Index = Index + 1
Num = '%s.%d' %(SecNum , Index) Num = '%s.%d' %(SecNum, Index)
OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + Ffs.SectionSuffix.get(SectionType)) OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + Ffs.SectionSuffix.get(SectionType))
f = open(File, 'r') f = open(File, 'r')
VerString = f.read() VerString = f.read()
@ -192,7 +192,7 @@ class EfiSection (EfiSectionClassObject):
elif FileList != []: elif FileList != []:
for File in FileList: for File in FileList:
Index = Index + 1 Index = Index + 1
Num = '%s.%d' %(SecNum , Index) Num = '%s.%d' %(SecNum, Index)
OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + Ffs.SectionSuffix.get(SectionType)) OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + Ffs.SectionSuffix.get(SectionType))
f = open(File, 'r') f = open(File, 'r')
UiString = f.read() UiString = f.read()
@ -237,7 +237,7 @@ class EfiSection (EfiSectionClassObject):
for File in FileList: for File in FileList:
""" Copy Map file to FFS output path """ """ Copy Map file to FFS output path """
Index = Index + 1 Index = Index + 1
Num = '%s.%d' %(SecNum , Index) Num = '%s.%d' %(SecNum, Index)
OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + Num + Ffs.SectionSuffix.get(SectionType)) OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + Num + Ffs.SectionSuffix.get(SectionType))
File = GenFdsGlobalVariable.MacroExtend(File, Dict) File = GenFdsGlobalVariable.MacroExtend(File, Dict)


@ -137,7 +137,7 @@ class FD(FDClassObject):
# Call each region's AddToBuffer function # Call each region's AddToBuffer function
# #
GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function') GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
RegionObj.AddToBuffer (FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFds.ImageBinDict, self.vtfRawDict, self.DefineVarDict,Flag=Flag) RegionObj.AddToBuffer (FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFds.ImageBinDict, self.vtfRawDict, self.DefineVarDict, Flag=Flag)
# #
# Write the buffer contents to Fd file # Write the buffer contents to Fd file
# #
@ -163,7 +163,7 @@ class FD(FDClassObject):
if len(RegionObj.RegionDataList) == 1: if len(RegionObj.RegionDataList) == 1:
RegionData = RegionObj.RegionDataList[0] RegionData = RegionObj.RegionDataList[0]
FvList.append(RegionData.upper()) FvList.append(RegionData.upper())
FvAddDict[RegionData.upper()] = (int(self.BaseAddress,16) + \ FvAddDict[RegionData.upper()] = (int(self.BaseAddress, 16) + \
RegionObj.Offset, RegionObj.Size) RegionObj.Offset, RegionObj.Size)
else: else:
Offset = RegionObj.Offset Offset = RegionObj.Offset
@ -178,7 +178,7 @@ class FD(FDClassObject):
Size = 0 Size = 0
for blockStatement in FvObj.BlockSizeList: for blockStatement in FvObj.BlockSizeList:
Size = Size + blockStatement[0] * blockStatement[1] Size = Size + blockStatement[0] * blockStatement[1]
FvAddDict[RegionData.upper()] = (int(self.BaseAddress,16) + \ FvAddDict[RegionData.upper()] = (int(self.BaseAddress, 16) + \
Offset, Size) Offset, Size)
Offset = Offset + Size Offset = Offset + Size
# #


@ -1820,7 +1820,7 @@ class FdfParser:
return long( return long(
ValueExpression(Expr, ValueExpression(Expr,
self.__CollectMacroPcd() self.__CollectMacroPcd()
)(True),0) )(True), 0)
except Exception: except Exception:
self.SetFileBufferPos(StartPos) self.SetFileBufferPos(StartPos)
return None return None
@ -2730,7 +2730,7 @@ class FdfParser:
while True: while True:
AlignValue = None AlignValue = None
if self.__GetAlignment(): if self.__GetAlignment():
if self.__Token not in ("Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K", "128K", if self.__Token not in ("Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K", "64K", "128K",
"256K", "512K", "1M", "2M", "4M", "8M", "16M"): "256K", "512K", "1M", "2M", "4M", "8M", "16M"):
raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber) raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
#For FFS, Auto is default option same to "" #For FFS, Auto is default option same to ""
@ -2789,7 +2789,7 @@ class FdfParser:
FfsFileObj.CheckSum = True FfsFileObj.CheckSum = True
if self.__GetAlignment(): if self.__GetAlignment():
if self.__Token not in ("Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K", "128K", if self.__Token not in ("Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K", "64K", "128K",
"256K", "512K", "1M", "2M", "4M", "8M", "16M"): "256K", "512K", "1M", "2M", "4M", "8M", "16M"):
raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber) raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
#For FFS, Auto is default option same to "" #For FFS, Auto is default option same to ""
@ -2861,7 +2861,7 @@ class FdfParser:
AlignValue = None AlignValue = None
if self.__GetAlignment(): if self.__GetAlignment():
if self.__Token not in ("Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K", "128K", if self.__Token not in ("Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K", "64K", "128K",
"256K", "512K", "1M", "2M", "4M", "8M", "16M"): "256K", "512K", "1M", "2M", "4M", "8M", "16M"):
raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber) raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
AlignValue = self.__Token AlignValue = self.__Token
@ -3151,7 +3151,7 @@ class FdfParser:
AlignValue = None AlignValue = None
if self.__GetAlignment(): if self.__GetAlignment():
if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K", "128K", if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K", "64K", "128K",
"256K", "512K", "1M", "2M", "4M", "8M", "16M"): "256K", "512K", "1M", "2M", "4M", "8M", "16M"):
raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber) raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
AlignValue = self.__Token AlignValue = self.__Token
@ -3544,7 +3544,7 @@ class FdfParser:
AfileName = self.__Token AfileName = self.__Token
AfileBaseName = os.path.basename(AfileName) AfileBaseName = os.path.basename(AfileName)
if os.path.splitext(AfileBaseName)[1] not in [".bin",".BIN",".Bin",".dat",".DAT",".Dat",".data",".DATA",".Data"]: if os.path.splitext(AfileBaseName)[1] not in [".bin", ".BIN", ".Bin", ".dat", ".DAT", ".Dat", ".data", ".DATA", ".Data"]:
raise Warning('invalid binary file type, should be one of "bin",BINARY_FILE_TYPE_BIN,"Bin","dat","DAT","Dat","data","DATA","Data"', \ raise Warning('invalid binary file type, should be one of "bin",BINARY_FILE_TYPE_BIN,"Bin","dat","DAT","Dat","data","DATA","Data"', \
self.FileName, self.CurrentLineNumber) self.FileName, self.CurrentLineNumber)
@ -3741,7 +3741,7 @@ class FdfParser:
AlignValue = "" AlignValue = ""
if self.__GetAlignment(): if self.__GetAlignment():
if self.__Token not in ("Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K", "128K", if self.__Token not in ("Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K", "64K", "128K",
"256K", "512K", "1M", "2M", "4M", "8M", "16M"): "256K", "512K", "1M", "2M", "4M", "8M", "16M"):
raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber) raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
#For FFS, Auto is default option same to "" #For FFS, Auto is default option same to ""
@ -3791,7 +3791,7 @@ class FdfParser:
SectAlignment = "" SectAlignment = ""
if self.__GetAlignment(): if self.__GetAlignment():
if self.__Token not in ("Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K", "128K", if self.__Token not in ("Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K", "64K", "128K",
"256K", "512K", "1M", "2M", "4M", "8M", "16M"): "256K", "512K", "1M", "2M", "4M", "8M", "16M"):
raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber) raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
if self.__Token == 'Auto' and (not SectionName == BINARY_FILE_TYPE_PE32) and (not SectionName == BINARY_FILE_TYPE_TE): if self.__Token == 'Auto' and (not SectionName == BINARY_FILE_TYPE_PE32) and (not SectionName == BINARY_FILE_TYPE_TE):
@ -3871,7 +3871,7 @@ class FdfParser:
FvImageSectionObj.FvFileType = self.__Token FvImageSectionObj.FvFileType = self.__Token
if self.__GetAlignment(): if self.__GetAlignment():
if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K", "128K", if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K", "64K", "128K",
"256K", "512K", "1M", "2M", "4M", "8M", "16M"): "256K", "512K", "1M", "2M", "4M", "8M", "16M"):
raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber) raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
FvImageSectionObj.Alignment = self.__Token FvImageSectionObj.Alignment = self.__Token
@ -3939,7 +3939,7 @@ class FdfParser:
EfiSectionObj.BuildNum = self.__Token EfiSectionObj.BuildNum = self.__Token
if self.__GetAlignment(): if self.__GetAlignment():
if self.__Token not in ("Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K", "128K", if self.__Token not in ("Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K", "64K", "128K",
"256K", "512K", "1M", "2M", "4M", "8M", "16M"): "256K", "512K", "1M", "2M", "4M", "8M", "16M"):
raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber) raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
if self.__Token == 'Auto' and (not SectionName == BINARY_FILE_TYPE_PE32) and (not SectionName == BINARY_FILE_TYPE_TE): if self.__Token == 'Auto' and (not SectionName == BINARY_FILE_TYPE_PE32) and (not SectionName == BINARY_FILE_TYPE_TE):
@ -4679,7 +4679,7 @@ class FdfParser:
FvInFdList = self.__GetFvInFd(RefFdName) FvInFdList = self.__GetFvInFd(RefFdName)
if FvInFdList != []: if FvInFdList != []:
for FvNameInFd in FvInFdList: for FvNameInFd in FvInFdList:
LogStr += "FD %s contains FV %s\n" % (RefFdName,FvNameInFd) LogStr += "FD %s contains FV %s\n" % (RefFdName, FvNameInFd)
if FvNameInFd not in RefFvStack: if FvNameInFd not in RefFvStack:
RefFvStack.append(FvNameInFd) RefFvStack.append(FvNameInFd)
@ -4735,7 +4735,7 @@ class FdfParser:
CapInFdList = self.__GetCapInFd(RefFdName) CapInFdList = self.__GetCapInFd(RefFdName)
if CapInFdList != []: if CapInFdList != []:
for CapNameInFd in CapInFdList: for CapNameInFd in CapInFdList:
LogStr += "FD %s contains Capsule %s\n" % (RefFdName,CapNameInFd) LogStr += "FD %s contains Capsule %s\n" % (RefFdName, CapNameInFd)
if CapNameInFd not in RefCapStack: if CapNameInFd not in RefCapStack:
RefCapStack.append(CapNameInFd) RefCapStack.append(CapNameInFd)
@ -4746,7 +4746,7 @@ class FdfParser:
FvInFdList = self.__GetFvInFd(RefFdName) FvInFdList = self.__GetFvInFd(RefFdName)
if FvInFdList != []: if FvInFdList != []:
for FvNameInFd in FvInFdList: for FvNameInFd in FvInFdList:
LogStr += "FD %s contains FV %s\n" % (RefFdName,FvNameInFd) LogStr += "FD %s contains FV %s\n" % (RefFdName, FvNameInFd)
if FvNameInFd not in RefFvList: if FvNameInFd not in RefFvList:
RefFvList.append(FvNameInFd) RefFvList.append(FvNameInFd)


@ -293,7 +293,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
try: try:
Pcd.InfDefaultValue = ValueExpressionEx(Pcd.InfDefaultValue, Pcd.DatumType, Platform._GuidDict)(True) Pcd.InfDefaultValue = ValueExpressionEx(Pcd.InfDefaultValue, Pcd.DatumType, Platform._GuidDict)(True)
except BadExpression: except BadExpression:
EdkLogger.error("GenFds", GENFDS_ERROR, 'PCD [%s.%s] Value "%s"' %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.DefaultValue),File=self.InfFileName) EdkLogger.error("GenFds", GENFDS_ERROR, 'PCD [%s.%s] Value "%s"' %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.DefaultValue), File=self.InfFileName)
# Check value, if value are equal, no need to patch # Check value, if value are equal, no need to patch
if Pcd.DatumType == TAB_VOID: if Pcd.DatumType == TAB_VOID:
@ -446,7 +446,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
self.__InfParse__(Dict) self.__InfParse__(Dict)
Arch = self.GetCurrentArch() Arch = self.GetCurrentArch()
SrcFile = mws.join( GenFdsGlobalVariable.WorkSpaceDir , self.InfFileName); SrcFile = mws.join( GenFdsGlobalVariable.WorkSpaceDir, self.InfFileName);
DestFile = os.path.join( self.OutputPath, self.ModuleGuid + '.ffs') DestFile = os.path.join( self.OutputPath, self.ModuleGuid + '.ffs')
SrcFileDir = "." SrcFileDir = "."
@ -694,13 +694,13 @@ class FfsInfStatement(FfsInfStatementClassObject):
Arch = self.CurrentArch Arch = self.CurrentArch
OutputPath = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch], OutputPath = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch],
Arch , Arch,
ModulePath, ModulePath,
FileName, FileName,
'OUTPUT' 'OUTPUT'
) )
DebugPath = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch], DebugPath = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch],
Arch , Arch,
ModulePath, ModulePath,
FileName, FileName,
'DEBUG' 'DEBUG'
@ -962,9 +962,9 @@ class FfsInfStatement(FfsInfStatementClassObject):
Sect.FvParentAddr = FvParentAddr Sect.FvParentAddr = FvParentAddr
if Rule.KeyStringList != []: if Rule.KeyStringList != []:
SectList, Align = Sect.GenSection(self.OutputPath , self.ModuleGuid, SecIndex, Rule.KeyStringList, self, IsMakefile = IsMakefile) SectList, Align = Sect.GenSection(self.OutputPath, self.ModuleGuid, SecIndex, Rule.KeyStringList, self, IsMakefile = IsMakefile)
else : else :
SectList, Align = Sect.GenSection(self.OutputPath , self.ModuleGuid, SecIndex, self.KeyStringList, self, IsMakefile = IsMakefile) SectList, Align = Sect.GenSection(self.OutputPath, self.ModuleGuid, SecIndex, self.KeyStringList, self, IsMakefile = IsMakefile)
if not HasGeneratedFlag: if not HasGeneratedFlag:
UniVfrOffsetFileSection = "" UniVfrOffsetFileSection = ""
@ -1121,7 +1121,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
try : try :
SaveFileOnChange(UniVfrOffsetFileName, fStringIO.getvalue()) SaveFileOnChange(UniVfrOffsetFileName, fStringIO.getvalue())
except: except:
EdkLogger.error("GenFds", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %UniVfrOffsetFileName,None) EdkLogger.error("GenFds", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %UniVfrOffsetFileName, None)
fStringIO.close () fStringIO.close ()


@ -379,8 +379,8 @@ class FV (FvClassObject):
# check if the file path exists or not # check if the file path exists or not
if not os.path.isfile(FileFullPath): if not os.path.isfile(FileFullPath):
GenFdsGlobalVariable.ErrorLogger("Error opening FV Extension Header Entry file %s." % (self.FvExtEntryData[Index])) GenFdsGlobalVariable.ErrorLogger("Error opening FV Extension Header Entry file %s." % (self.FvExtEntryData[Index]))
FvExtFile = open (FileFullPath,'rb') FvExtFile = open (FileFullPath, 'rb')
FvExtFile.seek(0,2) FvExtFile.seek(0, 2)
Size = FvExtFile.tell() Size = FvExtFile.tell()
if Size >= 0x10000: if Size >= 0x10000:
GenFdsGlobalVariable.ErrorLogger("The size of FV Extension Header Entry file %s exceeds 0x10000." % (self.FvExtEntryData[Index])) GenFdsGlobalVariable.ErrorLogger("The size of FV Extension Header Entry file %s exceeds 0x10000." % (self.FvExtEntryData[Index]))
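The size check above seeks to the end of the extension-header file and reads the resulting offset. A minimal sketch of that idiom (the helper name file_size is hypothetical; os.path.getsize would give the same answer):

def file_size(path):
    f = open(path, 'rb')
    try:
        f.seek(0, 2)   # move to end of file
        return f.tell()
    finally:
        f.close()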


@ -65,7 +65,7 @@ class FvImageSection(FvImageSectionClassObject):
for FvFileName in FileList: for FvFileName in FileList:
FvAlignmentValue = 0 FvAlignmentValue = 0
if os.path.isfile(FvFileName): if os.path.isfile(FvFileName):
FvFileObj = open (FvFileName,'rb') FvFileObj = open (FvFileName, 'rb')
FvFileObj.seek(0) FvFileObj.seek(0)
# PI FvHeader is 0x48 byte # PI FvHeader is 0x48 byte
FvHeaderBuffer = FvFileObj.read(0x48) FvHeaderBuffer = FvFileObj.read(0x48)
@ -113,7 +113,7 @@ class FvImageSection(FvImageSectionClassObject):
if self.FvFileName is not None: if self.FvFileName is not None:
FvFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvFileName) FvFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvFileName)
if os.path.isfile(FvFileName): if os.path.isfile(FvFileName):
FvFileObj = open (FvFileName,'rb') FvFileObj = open (FvFileName, 'rb')
FvFileObj.seek(0) FvFileObj.seek(0)
# PI FvHeader is 0x48 byte # PI FvHeader is 0x48 byte
FvHeaderBuffer = FvFileObj.read(0x48) FvHeaderBuffer = FvFileObj.read(0x48)


@ -341,7 +341,7 @@ class GenFdsGlobalVariable:
for Arch in ArchList: for Arch in ArchList:
GenFdsGlobalVariable.OutputDirDict[Arch] = os.path.normpath( GenFdsGlobalVariable.OutputDirDict[Arch] = os.path.normpath(
os.path.join(GlobalData.gWorkspace, os.path.join(GlobalData.gWorkspace,
WorkSpace.Db.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,GlobalData.gGlobalDefines['TARGET'], WorkSpace.Db.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GlobalData.gGlobalDefines['TARGET'],
GlobalData.gGlobalDefines['TOOLCHAIN']].OutputDirectory, GlobalData.gGlobalDefines['TOOLCHAIN']].OutputDirectory,
GlobalData.gGlobalDefines['TARGET'] +'_' + GlobalData.gGlobalDefines['TOOLCHAIN'])) GlobalData.gGlobalDefines['TARGET'] +'_' + GlobalData.gGlobalDefines['TOOLCHAIN']))
GenFdsGlobalVariable.OutputDirFromDscDict[Arch] = os.path.normpath( GenFdsGlobalVariable.OutputDirFromDscDict[Arch] = os.path.normpath(
@ -547,7 +547,7 @@ class GenFdsGlobalVariable:
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
if MakefilePath: if MakefilePath:
if (tuple(Cmd),tuple(GenFdsGlobalVariable.SecCmdList),tuple(GenFdsGlobalVariable.CopyList)) not in GenFdsGlobalVariable.FfsCmdDict: if (tuple(Cmd), tuple(GenFdsGlobalVariable.SecCmdList), tuple(GenFdsGlobalVariable.CopyList)) not in GenFdsGlobalVariable.FfsCmdDict:
GenFdsGlobalVariable.FfsCmdDict[tuple(Cmd), tuple(GenFdsGlobalVariable.SecCmdList), tuple(GenFdsGlobalVariable.CopyList)] = MakefilePath GenFdsGlobalVariable.FfsCmdDict[tuple(Cmd), tuple(GenFdsGlobalVariable.SecCmdList), tuple(GenFdsGlobalVariable.CopyList)] = MakefilePath
GenFdsGlobalVariable.SecCmdList = [] GenFdsGlobalVariable.SecCmdList = []
GenFdsGlobalVariable.CopyList = [] GenFdsGlobalVariable.CopyList = []


@ -110,7 +110,7 @@ def _parseForGCC(lines, efifilepath):
PcdName = m.groups(0)[0] PcdName = m.groups(0)[0]
m = pcdPatternGcc.match(lines[index + 1].strip()) m = pcdPatternGcc.match(lines[index + 1].strip())
if m is not None: if m is not None:
bpcds.append((PcdName, int(m.groups(0)[0], 16) , int(sections[-1][1], 16), sections[-1][0])) bpcds.append((PcdName, int(m.groups(0)[0], 16), int(sections[-1][1], 16), sections[-1][0]))
# get section information from efi file # get section information from efi file
efisecs = PeImageClass(efifilepath).SectionHeaderList efisecs = PeImageClass(efifilepath).SectionHeaderList


@ -88,7 +88,7 @@ if __name__ == '__main__':
parser.add_argument("--signature-size", dest='SignatureSizeStr', type=str, help="specify the signature size for decode process.") parser.add_argument("--signature-size", dest='SignatureSizeStr', type=str, help="specify the signature size for decode process.")
parser.add_argument("-v", "--verbose", dest='Verbose', action="store_true", help="increase output messages") parser.add_argument("-v", "--verbose", dest='Verbose', action="store_true", help="increase output messages")
parser.add_argument("-q", "--quiet", dest='Quiet', action="store_true", help="reduce output messages") parser.add_argument("-q", "--quiet", dest='Quiet', action="store_true", help="reduce output messages")
parser.add_argument("--debug", dest='Debug', type=int, metavar='[0-9]', choices=range(0,10), default=0, help="set debug level") parser.add_argument("--debug", dest='Debug', type=int, metavar='[0-9]', choices=range(0, 10), default=0, help="set debug level")
parser.add_argument(metavar="input_file", dest='InputFile', type=argparse.FileType('rb'), help="specify the input filename") parser.add_argument(metavar="input_file", dest='InputFile', type=argparse.FileType('rb'), help="specify the input filename")
# #


@ -51,7 +51,7 @@ if __name__ == '__main__':
parser.add_argument("--public-key-hash-c", dest='PublicKeyHashCFile', type=argparse.FileType('wb'), help="specify the public key hash filename that is SHA 256 hash of 2048 bit RSA public key in C structure format") parser.add_argument("--public-key-hash-c", dest='PublicKeyHashCFile', type=argparse.FileType('wb'), help="specify the public key hash filename that is SHA 256 hash of 2048 bit RSA public key in C structure format")
parser.add_argument("-v", "--verbose", dest='Verbose', action="store_true", help="increase output messages") parser.add_argument("-v", "--verbose", dest='Verbose', action="store_true", help="increase output messages")
parser.add_argument("-q", "--quiet", dest='Quiet', action="store_true", help="reduce output messages") parser.add_argument("-q", "--quiet", dest='Quiet', action="store_true", help="reduce output messages")
parser.add_argument("--debug", dest='Debug', type=int, metavar='[0-9]', choices=range(0,10), default=0, help="set debug level") parser.add_argument("--debug", dest='Debug', type=int, metavar='[0-9]', choices=range(0, 10), default=0, help="set debug level")
# #
# Parse command line arguments # Parse command line arguments


@ -50,7 +50,7 @@ EFI_HASH_ALGORITHM_SHA256_GUID = uuid.UUID('{51aa59de-fdf2-4ea3-bc63-875fb7842ee
# UINT8 Signature[256]; # UINT8 Signature[256];
# } EFI_CERT_BLOCK_RSA_2048_SHA256; # } EFI_CERT_BLOCK_RSA_2048_SHA256;
# #
EFI_CERT_BLOCK_RSA_2048_SHA256 = collections.namedtuple('EFI_CERT_BLOCK_RSA_2048_SHA256', ['HashType','PublicKey','Signature']) EFI_CERT_BLOCK_RSA_2048_SHA256 = collections.namedtuple('EFI_CERT_BLOCK_RSA_2048_SHA256', ['HashType', 'PublicKey', 'Signature'])
EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT = struct.Struct('16s256s256s') EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT = struct.Struct('16s256s256s')
# #
@ -71,7 +71,7 @@ if __name__ == '__main__':
parser.add_argument("--private-key", dest='PrivateKeyFile', type=argparse.FileType('rb'), help="specify the private key filename. If not specified, a test signing key is used.") parser.add_argument("--private-key", dest='PrivateKeyFile', type=argparse.FileType('rb'), help="specify the private key filename. If not specified, a test signing key is used.")
parser.add_argument("-v", "--verbose", dest='Verbose', action="store_true", help="increase output messages") parser.add_argument("-v", "--verbose", dest='Verbose', action="store_true", help="increase output messages")
parser.add_argument("-q", "--quiet", dest='Quiet', action="store_true", help="reduce output messages") parser.add_argument("-q", "--quiet", dest='Quiet', action="store_true", help="reduce output messages")
parser.add_argument("--debug", dest='Debug', type=int, metavar='[0-9]', choices=range(0,10), default=0, help="set debug level") parser.add_argument("--debug", dest='Debug', type=int, metavar='[0-9]', choices=range(0, 10), default=0, help="set debug level")
parser.add_argument(metavar="input_file", dest='InputFile', type=argparse.FileType('rb'), help="specify the input filename") parser.add_argument(metavar="input_file", dest='InputFile', type=argparse.FileType('rb'), help="specify the input filename")
# #
@ -155,7 +155,7 @@ if __name__ == '__main__':
PublicKeyHexString = Process.communicate()[0].split('=')[1].strip() PublicKeyHexString = Process.communicate()[0].split('=')[1].strip()
PublicKey = '' PublicKey = ''
while len(PublicKeyHexString) > 0: while len(PublicKeyHexString) > 0:
PublicKey = PublicKey + chr(int(PublicKeyHexString[0:2],16)) PublicKey = PublicKey + chr(int(PublicKeyHexString[0:2], 16))
PublicKeyHexString=PublicKeyHexString[2:] PublicKeyHexString=PublicKeyHexString[2:]
if Process.returncode != 0: if Process.returncode != 0:
sys.exit(Process.returncode) sys.exit(Process.returncode)
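The signing helper above models EFI_CERT_BLOCK_RSA_2048_SHA256 as a namedtuple packed through a fixed 16s256s256s layout and rebuilds the public key from an OpenSSL hex dump two characters at a time. A self-contained sketch of those two steps in the tool's Python 2 idiom; hex_string_to_bytes and pack_cert_block are hypothetical helper names, and the OpenSSL signing flow itself is omitted:

import collections
import struct
import uuid

EFI_HASH_ALGORITHM_SHA256_GUID = uuid.UUID('{51aa59de-fdf2-4ea3-bc63-875fb7842ee9}')
EFI_CERT_BLOCK_RSA_2048_SHA256 = collections.namedtuple(
    'EFI_CERT_BLOCK_RSA_2048_SHA256', ['HashType', 'PublicKey', 'Signature'])
EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT = struct.Struct('16s256s256s')

def hex_string_to_bytes(hex_string):
    # Convert 'AABBCC...' to raw bytes, two hex digits per byte, as in the loop above.
    result = ''
    while len(hex_string) > 0:
        result = result + chr(int(hex_string[0:2], 16))
        hex_string = hex_string[2:]
    return result

def pack_cert_block(public_key, signature):
    # Pack the fixed-size block: 16-byte hash-type GUID, 256-byte key, 256-byte signature.
    block = EFI_CERT_BLOCK_RSA_2048_SHA256(
        HashType=EFI_HASH_ALGORITHM_SHA256_GUID.bytes_le,
        PublicKey=public_key,
        Signature=signature)
    return EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT.pack(*block)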


@ -59,11 +59,11 @@ class TargetTool():
def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter): def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
"""Convert a text file to a dictionary of (name:value) pairs.""" """Convert a text file to a dictionary of (name:value) pairs."""
try: try:
f = open(FileName,'r') f = open(FileName, 'r')
for Line in f: for Line in f:
if Line.startswith(CommentCharacter) or Line.strip() == '': if Line.startswith(CommentCharacter) or Line.strip() == '':
continue continue
LineList = Line.split(KeySplitCharacter,1) LineList = Line.split(KeySplitCharacter, 1)
if len(LineList) >= 2: if len(LineList) >= 2:
Key = LineList[0].strip() Key = LineList[0].strip()
if Key.startswith(CommentCharacter) == False and Key in self.TargetTxtDictionary: if Key.startswith(CommentCharacter) == False and Key in self.TargetTxtDictionary:
@ -103,7 +103,7 @@ class TargetTool():
if Line.startswith(CommentCharacter) or Line.strip() == '': if Line.startswith(CommentCharacter) or Line.strip() == '':
fw.write(Line) fw.write(Line)
else: else:
LineList = Line.split(KeySplitCharacter,1) LineList = Line.split(KeySplitCharacter, 1)
if len(LineList) >= 2: if len(LineList) >= 2:
Key = LineList[0].strip() Key = LineList[0].strip()
if Key.startswith(CommentCharacter) == False and Key in self.TargetTxtDictionary: if Key.startswith(CommentCharacter) == False and Key in self.TargetTxtDictionary:
@ -202,14 +202,14 @@ def RangeCheckCallback(option, opt_str, value, parser):
parser.error("Option %s only allows one instance in command line!" % option) parser.error("Option %s only allows one instance in command line!" % option)
def MyOptionParser(): def MyOptionParser():
parser = OptionParser(version=__version__,prog="TargetTool.exe",usage=__usage__,description=__copyright__) parser = OptionParser(version=__version__, prog="TargetTool.exe", usage=__usage__, description=__copyright__)
parser.add_option("-a", "--arch", action="append", type="choice", choices=['IA32','X64','IPF','EBC', 'ARM', 'AARCH64','0'], dest="TARGET_ARCH", parser.add_option("-a", "--arch", action="append", type="choice", choices=['IA32', 'X64', 'IPF', 'EBC', 'ARM', 'AARCH64', '0'], dest="TARGET_ARCH",
help="ARCHS is one of list: IA32, X64, IPF, ARM, AARCH64 or EBC, which replaces target.txt's TARGET_ARCH definition. To specify more archs, please repeat this option. 0 will clear this setting in target.txt and can't combine with other value.") help="ARCHS is one of list: IA32, X64, IPF, ARM, AARCH64 or EBC, which replaces target.txt's TARGET_ARCH definition. To specify more archs, please repeat this option. 0 will clear this setting in target.txt and can't combine with other value.")
parser.add_option("-p", "--platform", action="callback", type="string", dest="DSCFILE", callback=SingleCheckCallback, parser.add_option("-p", "--platform", action="callback", type="string", dest="DSCFILE", callback=SingleCheckCallback,
help="Specify a DSC file, which replace target.txt's ACTIVE_PLATFORM definition. 0 will clear this setting in target.txt and can't combine with other value.") help="Specify a DSC file, which replace target.txt's ACTIVE_PLATFORM definition. 0 will clear this setting in target.txt and can't combine with other value.")
parser.add_option("-c", "--tooldef", action="callback", type="string", dest="TOOL_DEFINITION_FILE", callback=SingleCheckCallback, parser.add_option("-c", "--tooldef", action="callback", type="string", dest="TOOL_DEFINITION_FILE", callback=SingleCheckCallback,
help="Specify the WORKSPACE relative path of tool_def.txt file, which replace target.txt's TOOL_CHAIN_CONF definition. 0 will clear this setting in target.txt and can't combine with other value.") help="Specify the WORKSPACE relative path of tool_def.txt file, which replace target.txt's TOOL_CHAIN_CONF definition. 0 will clear this setting in target.txt and can't combine with other value.")
parser.add_option("-t", "--target", action="append", type="choice", choices=['DEBUG','RELEASE','0'], dest="TARGET", parser.add_option("-t", "--target", action="append", type="choice", choices=['DEBUG', 'RELEASE', '0'], dest="TARGET",
help="TARGET is one of list: DEBUG, RELEASE, which replaces target.txt's TARGET definition. To specify more TARGET, please repeat this option. 0 will clear this setting in target.txt and can't combine with other value.") help="TARGET is one of list: DEBUG, RELEASE, which replaces target.txt's TARGET definition. To specify more TARGET, please repeat this option. 0 will clear this setting in target.txt and can't combine with other value.")
parser.add_option("-n", "--tagname", action="callback", type="string", dest="TOOL_CHAIN_TAG", callback=SingleCheckCallback, parser.add_option("-n", "--tagname", action="callback", type="string", dest="TOOL_CHAIN_TAG", callback=SingleCheckCallback,
help="Specify the Tool Chain Tagname, which replaces target.txt's TOOL_CHAIN_TAG definition. 0 will clear this setting in target.txt and can't combine with other value.") help="Specify the Tool Chain Tagname, which replaces target.txt's TOOL_CHAIN_TAG definition. 0 will clear this setting in target.txt and can't combine with other value.")


@ -261,7 +261,7 @@ def TrimPreprocessedVfr(Source, Target):
CreateDirectory(os.path.dirname(Target)) CreateDirectory(os.path.dirname(Target))
try: try:
f = open (Source,'r') f = open (Source, 'r')
except: except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source) EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
# read whole file # read whole file
@ -310,7 +310,7 @@ def TrimPreprocessedVfr(Source, Target):
# save all lines trimmed # save all lines trimmed
try: try:
f = open (Target,'w') f = open (Target, 'w')
except: except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target) EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
f.writelines(Lines) f.writelines(Lines)
@ -407,7 +407,7 @@ def TrimAslFile(Source, Target, IncludePathFile):
if IncludePathFile: if IncludePathFile:
try: try:
LineNum = 0 LineNum = 0
for Line in open(IncludePathFile,'r'): for Line in open(IncludePathFile, 'r'):
LineNum += 1 LineNum += 1
if Line.startswith("/I") or Line.startswith ("-I"): if Line.startswith("/I") or Line.startswith ("-I"):
IncludePathList.append(Line[2:].strip()) IncludePathList.append(Line[2:].strip())
@ -425,7 +425,7 @@ def TrimAslFile(Source, Target, IncludePathFile):
# save all lines trimmed # save all lines trimmed
try: try:
f = open (Target,'w') f = open (Target, 'w')
except: except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target) EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
@ -560,7 +560,7 @@ def TrimEdkSourceCode(Source, Target):
CreateDirectory(os.path.dirname(Target)) CreateDirectory(os.path.dirname(Target))
try: try:
f = open (Source,'rb') f = open (Source, 'rb')
except: except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source) EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
# read whole file # read whole file
@ -568,7 +568,7 @@ def TrimEdkSourceCode(Source, Target):
f.close() f.close()
NewLines = None NewLines = None
for Re,Repl in gImportCodePatterns: for Re, Repl in gImportCodePatterns:
if NewLines is None: if NewLines is None:
NewLines = Re.sub(Repl, Lines) NewLines = Re.sub(Repl, Lines)
else: else:
@ -579,7 +579,7 @@ def TrimEdkSourceCode(Source, Target):
return return
try: try:
f = open (Target,'wb') f = open (Target, 'wb')
except: except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target) EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
f.write(NewLines) f.write(NewLines)


@ -285,8 +285,8 @@ class DependencyRules(object):
pass pass
DecPath = dirname(DecFile) DecPath = dirname(DecFile)
if DecPath.find(WorkSP) > -1: if DecPath.find(WorkSP) > -1:
InstallPath = GetRelativePath(DecPath,WorkSP) InstallPath = GetRelativePath(DecPath, WorkSP)
DecFileRelaPath = GetRelativePath(DecFile,WorkSP) DecFileRelaPath = GetRelativePath(DecFile, WorkSP)
else: else:
InstallPath = DecPath InstallPath = DecPath
DecFileRelaPath = DecFile DecFileRelaPath = DecFile
@ -348,8 +348,8 @@ class DependencyRules(object):
pass pass
DecPath = dirname(DecFile) DecPath = dirname(DecFile)
if DecPath.find(WorkSP) > -1: if DecPath.find(WorkSP) > -1:
InstallPath = GetRelativePath(DecPath,WorkSP) InstallPath = GetRelativePath(DecPath, WorkSP)
DecFileRelaPath = GetRelativePath(DecFile,WorkSP) DecFileRelaPath = GetRelativePath(DecFile, WorkSP)
else: else:
InstallPath = DecPath InstallPath = DecPath
DecFileRelaPath = DecFile DecFileRelaPath = DecFile


@ -459,7 +459,7 @@ class IpiDatabase(object):
(select InstallPath from ModInPkgInfo where (select InstallPath from ModInPkgInfo where
ModInPkgInfo.PackageGuid ='%s' ModInPkgInfo.PackageGuid ='%s'
and ModInPkgInfo.PackageVersion = '%s')""" \ and ModInPkgInfo.PackageVersion = '%s')""" \
% (Pkg[0], Pkg[1], Pkg[0], Pkg[1], Pkg[0], Pkg[1],Pkg[0], Pkg[1]) % (Pkg[0], Pkg[1], Pkg[0], Pkg[1], Pkg[0], Pkg[1], Pkg[0], Pkg[1])
self.Cur.execute(SqlCommand) self.Cur.execute(SqlCommand)
# #
@ -921,7 +921,7 @@ class IpiDatabase(object):
def __ConvertToSqlString(self, StringList): def __ConvertToSqlString(self, StringList):
if self.DpTable: if self.DpTable:
pass pass
return map(lambda s: s.replace("'", "''") , StringList) return map(lambda s: s.replace("'", "''"), StringList)


@ -632,7 +632,7 @@ def SplitString(String):
# @param StringList: A list for strings to be converted # @param StringList: A list for strings to be converted
# #
def ConvertToSqlString(StringList): def ConvertToSqlString(StringList):
return map(lambda s: s.replace("'", "''") , StringList) return map(lambda s: s.replace("'", "''"), StringList)
## Convert To Sql String ## Convert To Sql String
# #


@ -648,7 +648,7 @@ class DecPomAlignment(PackageObject):
ContainerFile, ContainerFile,
(Item.TokenSpaceGuidCName, Item.TokenCName, (Item.TokenSpaceGuidCName, Item.TokenCName,
Item.DefaultValue, Item.DatumType, Item.TokenValue, Item.DefaultValue, Item.DatumType, Item.TokenValue,
Type, Item.GetHeadComment(), Item.GetTailComment(),''), Type, Item.GetHeadComment(), Item.GetTailComment(), ''),
Language, Language,
self.DecParser.GetDefineSectionMacro() self.DecParser.GetDefineSectionMacro()
) )


@ -314,7 +314,7 @@ def Main():
GlobalData.gDB.CloseDb() GlobalData.gDB.CloseDb()
if pf.system() == 'Windows': if pf.system() == 'Windows':
os.system('subst %s /D' % GlobalData.gWORKSPACE.replace('\\','')) os.system('subst %s /D' % GlobalData.gWORKSPACE.replace('\\', ''))
return ReturnCode return ReturnCode


@ -355,7 +355,7 @@ class PackageHeaderXml(object):
def FromXml(self, Item, Key, PackageObject2): def FromXml(self, Item, Key, PackageObject2):
if not Item: if not Item:
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea'] XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea']
CheckDict = {'PackageHeader':None, } CheckDict = {'PackageHeader': None, }
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
self.PackagePath = XmlElement(Item, '%s/PackagePath' % Key) self.PackagePath = XmlElement(Item, '%s/PackagePath' % Key)
self.Header.FromXml(Item, Key) self.Header.FromXml(Item, Key)


@ -103,7 +103,7 @@ class DistributionPackageXml(object):
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
else: else:
XmlTreeLevel = ['DistributionPackage', 'DistributionHeader'] XmlTreeLevel = ['DistributionPackage', 'DistributionHeader']
CheckDict = CheckDict = {'DistributionHeader':'', } CheckDict = CheckDict = {'DistributionHeader': '', }
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
# #
@ -123,16 +123,16 @@ class DistributionPackageXml(object):
# #
if self.DistP.Tools: if self.DistP.Tools:
XmlTreeLevel = ['DistributionPackage', 'Tools', 'Header'] XmlTreeLevel = ['DistributionPackage', 'Tools', 'Header']
CheckDict = {'Name':self.DistP.Tools.GetName(), } CheckDict = {'Name': self.DistP.Tools.GetName(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
if not self.DistP.Tools.GetFileList(): if not self.DistP.Tools.GetFileList():
XmlTreeLevel = ['DistributionPackage', 'Tools'] XmlTreeLevel = ['DistributionPackage', 'Tools']
CheckDict = {'FileName':None, } CheckDict = {'FileName': None, }
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
for Item in self.DistP.Tools.GetFileList(): for Item in self.DistP.Tools.GetFileList():
XmlTreeLevel = ['DistributionPackage', 'Tools'] XmlTreeLevel = ['DistributionPackage', 'Tools']
CheckDict = {'FileName':Item.GetURI(), } CheckDict = {'FileName': Item.GetURI(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
# #
@ -140,16 +140,16 @@ class DistributionPackageXml(object):
# #
if self.DistP.MiscellaneousFiles: if self.DistP.MiscellaneousFiles:
XmlTreeLevel = ['DistributionPackage', 'MiscellaneousFiles', 'Header'] XmlTreeLevel = ['DistributionPackage', 'MiscellaneousFiles', 'Header']
CheckDict = {'Name':self.DistP.MiscellaneousFiles.GetName(), } CheckDict = {'Name': self.DistP.MiscellaneousFiles.GetName(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
if not self.DistP.MiscellaneousFiles.GetFileList(): if not self.DistP.MiscellaneousFiles.GetFileList():
XmlTreeLevel = ['DistributionPackage', 'MiscellaneousFiles'] XmlTreeLevel = ['DistributionPackage', 'MiscellaneousFiles']
CheckDict = {'FileName':None, } CheckDict = {'FileName': None, }
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
for Item in self.DistP.MiscellaneousFiles.GetFileList(): for Item in self.DistP.MiscellaneousFiles.GetFileList():
XmlTreeLevel = ['DistributionPackage', 'MiscellaneousFiles'] XmlTreeLevel = ['DistributionPackage', 'MiscellaneousFiles']
CheckDict = {'FileName':Item.GetURI(), } CheckDict = {'FileName': Item.GetURI(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
# #
@ -157,7 +157,7 @@ class DistributionPackageXml(object):
# #
for Item in self.DistP.UserExtensions: for Item in self.DistP.UserExtensions:
XmlTreeLevel = ['DistributionPackage', 'UserExtensions'] XmlTreeLevel = ['DistributionPackage', 'UserExtensions']
CheckDict = {'UserId':Item.GetUserID(), } CheckDict = {'UserId': Item.GetUserID(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@ -449,10 +449,10 @@ def ValidateMS1(Module, TopXmlTreeLevel):
XmlTreeLevel = TopXmlTreeLevel + ['MiscellaneousFiles'] XmlTreeLevel = TopXmlTreeLevel + ['MiscellaneousFiles']
for Item in Module.GetMiscFileList(): for Item in Module.GetMiscFileList():
if not Item.GetFileList(): if not Item.GetFileList():
CheckDict = {'Filename':'', } CheckDict = {'Filename': '', }
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
for File in Item.GetFileList(): for File in Item.GetFileList():
CheckDict = {'Filename':File.GetURI(), } CheckDict = {'Filename': File.GetURI(), }
## ValidateMS2 ## ValidateMS2
# #
@ -915,10 +915,10 @@ def ValidatePS2(Package):
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'MiscellaneousFiles'] XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'MiscellaneousFiles']
for Item in Package.GetMiscFileList(): for Item in Package.GetMiscFileList():
if not Item.GetFileList(): if not Item.GetFileList():
CheckDict = {'Filename':'', } CheckDict = {'Filename': '', }
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
for File in Item.GetFileList(): for File in Item.GetFileList():
CheckDict = {'Filename':File.GetURI(), } CheckDict = {'Filename': File.GetURI(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel) IsRequiredItemListNull(CheckDict, XmlTreeLevel)
## ValidatePackageSurfaceArea ## ValidatePackageSurfaceArea


@ -70,23 +70,23 @@ class PcdClassObject(object):
if IsDsc: if IsDsc:
self.DscDefaultValue = Value self.DscDefaultValue = Value
self.PcdValueFromComm = "" self.PcdValueFromComm = ""
self.DefinitionPosition = ("","") self.DefinitionPosition = ("", "")
## Get the maximum number of bytes ## Get the maximum number of bytes
def GetPcdMaxSize(self): def GetPcdMaxSize(self):
if self.DatumType in TAB_PCD_NUMERIC_TYPES: if self.DatumType in TAB_PCD_NUMERIC_TYPES:
return MAX_SIZE_TYPE[self.DatumType] return MAX_SIZE_TYPE[self.DatumType]
MaxSize = int(self.MaxDatumSize,10) if self.MaxDatumSize else 0 MaxSize = int(self.MaxDatumSize, 10) if self.MaxDatumSize else 0
if self.PcdValueFromComm: if self.PcdValueFromComm:
if self.PcdValueFromComm.startswith("{") and self.PcdValueFromComm.endswith("}"): if self.PcdValueFromComm.startswith("{") and self.PcdValueFromComm.endswith("}"):
return max([len(self.PcdValueFromComm.split(",")),MaxSize]) return max([len(self.PcdValueFromComm.split(",")), MaxSize])
elif self.PcdValueFromComm.startswith("\"") or self.PcdValueFromComm.startswith("\'"): elif self.PcdValueFromComm.startswith("\"") or self.PcdValueFromComm.startswith("\'"):
return max([len(self.PcdValueFromComm)-2+1,MaxSize]) return max([len(self.PcdValueFromComm)-2+1, MaxSize])
elif self.PcdValueFromComm.startswith("L\""): elif self.PcdValueFromComm.startswith("L\""):
return max([2*(len(self.PcdValueFromComm)-3+1),MaxSize]) return max([2*(len(self.PcdValueFromComm)-3+1), MaxSize])
else: else:
return max([len(self.PcdValueFromComm),MaxSize]) return max([len(self.PcdValueFromComm), MaxSize])
return MaxSize return MaxSize
## Get the number of bytes ## Get the number of bytes
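GetPcdMaxSize above derives a byte count from a PCD value supplied on the command line: one byte per element for a {..} byte array, the string length plus a terminating NUL for quoted ASCII, and two bytes per character plus a wide NUL for L"..." strings. The same rules as a stand-alone sketch (the name pcd_value_max_size and its two-argument signature are assumptions of this example):

def pcd_value_max_size(value_from_comm, declared_max_size=0):
    # Mirror of the size rules applied to PcdValueFromComm above.
    if value_from_comm.startswith("{") and value_from_comm.endswith("}"):
        return max(len(value_from_comm.split(",")), declared_max_size)      # byte array
    elif value_from_comm.startswith("\"") or value_from_comm.startswith("'"):
        return max(len(value_from_comm) - 2 + 1, declared_max_size)         # ASCII string + NUL
    elif value_from_comm.startswith("L\""):
        return max(2 * (len(value_from_comm) - 3 + 1), declared_max_size)   # UCS-2 string + NUL
    else:
        return max(len(value_from_comm), declared_max_size)

# pcd_value_max_size('"ABC"') == 4; pcd_value_max_size('{0x1, 0x2}') == 2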
@ -178,7 +178,7 @@ class StructurePcd(PcdClassObject):
self.DefaultValues[FieldName] = [Value.strip(), FileName, LineNo] self.DefaultValues[FieldName] = [Value.strip(), FileName, LineNo]
return self.DefaultValues[FieldName] return self.DefaultValues[FieldName]
def SetDecDefaultValue(self,DefaultValue): def SetDecDefaultValue(self, DefaultValue):
self.DefaultValueFromDec = DefaultValue self.DefaultValueFromDec = DefaultValue
def AddOverrideValue (self, FieldName, Value, SkuName, DefaultStoreName, FileName="", LineNo=0): def AddOverrideValue (self, FieldName, Value, SkuName, DefaultStoreName, FileName="", LineNo=0):
if SkuName not in self.SkuOverrideValues: if SkuName not in self.SkuOverrideValues:


@ -369,16 +369,16 @@ class DecBuildData(PackageBuildClassObject):
def ProcessStructurePcd(self, StructurePcdRawDataSet): def ProcessStructurePcd(self, StructurePcdRawDataSet):
s_pcd_set = OrderedDict() s_pcd_set = OrderedDict()
for s_pcd,LineNo in StructurePcdRawDataSet: for s_pcd, LineNo in StructurePcdRawDataSet:
if s_pcd.TokenSpaceGuidCName not in s_pcd_set: if s_pcd.TokenSpaceGuidCName not in s_pcd_set:
s_pcd_set[s_pcd.TokenSpaceGuidCName] = [] s_pcd_set[s_pcd.TokenSpaceGuidCName] = []
s_pcd_set[s_pcd.TokenSpaceGuidCName].append((s_pcd,LineNo)) s_pcd_set[s_pcd.TokenSpaceGuidCName].append((s_pcd, LineNo))
str_pcd_set = [] str_pcd_set = []
for pcdname in s_pcd_set: for pcdname in s_pcd_set:
dep_pkgs = [] dep_pkgs = []
struct_pcd = StructurePcd() struct_pcd = StructurePcd()
for item,LineNo in s_pcd_set[pcdname]: for item, LineNo in s_pcd_set[pcdname]:
if "<HeaderFiles>" in item.TokenCName: if "<HeaderFiles>" in item.TokenCName:
struct_pcd.StructuredPcdIncludeFile.append(item.DefaultValue) struct_pcd.StructuredPcdIncludeFile.append(item.DefaultValue)
elif "<Packages>" in item.TokenCName: elif "<Packages>" in item.TokenCName:
@ -391,7 +391,7 @@ class DecBuildData(PackageBuildClassObject):
struct_pcd.PkgPath = self.MetaFile.File struct_pcd.PkgPath = self.MetaFile.File
struct_pcd.SetDecDefaultValue(item.DefaultValue) struct_pcd.SetDecDefaultValue(item.DefaultValue)
else: else:
struct_pcd.AddDefaultValue(item.TokenCName, item.DefaultValue,self.MetaFile.File,LineNo) struct_pcd.AddDefaultValue(item.TokenCName, item.DefaultValue, self.MetaFile.File, LineNo)
struct_pcd.PackageDecs = dep_pkgs struct_pcd.PackageDecs = dep_pkgs
str_pcd_set.append(struct_pcd) str_pcd_set.append(struct_pcd)
@ -412,7 +412,7 @@ class DecBuildData(PackageBuildClassObject):
StrPcdSet = [] StrPcdSet = []
RecordList = self._RawData[Type, self._Arch] RecordList = self._RawData[Type, self._Arch]
for TokenSpaceGuid, PcdCName, Setting, Arch, PrivateFlag, Dummy1, Dummy2 in RecordList: for TokenSpaceGuid, PcdCName, Setting, Arch, PrivateFlag, Dummy1, Dummy2 in RecordList:
PcdDict[Arch, PcdCName, TokenSpaceGuid] = (Setting,Dummy2) PcdDict[Arch, PcdCName, TokenSpaceGuid] = (Setting, Dummy2)
if not (PcdCName, TokenSpaceGuid) in PcdSet: if not (PcdCName, TokenSpaceGuid) in PcdSet:
PcdSet.append((PcdCName, TokenSpaceGuid)) PcdSet.append((PcdCName, TokenSpaceGuid))
@ -421,7 +421,7 @@ class DecBuildData(PackageBuildClassObject):
# limit the ARCH to self._Arch, if no self._Arch found, tdict # limit the ARCH to self._Arch, if no self._Arch found, tdict
# will automatically turn to 'common' ARCH and try again # will automatically turn to 'common' ARCH and try again
# #
Setting,LineNo = PcdDict[self._Arch, PcdCName, TokenSpaceGuid] Setting, LineNo = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
if Setting is None: if Setting is None:
continue continue
@ -442,9 +442,9 @@ class DecBuildData(PackageBuildClassObject):
list(validlists), list(validlists),
list(expressions) list(expressions)
) )
PcdObj.DefinitionPosition = (self.MetaFile.File,LineNo) PcdObj.DefinitionPosition = (self.MetaFile.File, LineNo)
if "." in TokenSpaceGuid: if "." in TokenSpaceGuid:
StrPcdSet.append((PcdObj,LineNo)) StrPcdSet.append((PcdObj, LineNo))
else: else:
Pcds[PcdCName, TokenSpaceGuid, self._PCD_TYPE_STRING_[Type]] = PcdObj Pcds[PcdCName, TokenSpaceGuid, self._PCD_TYPE_STRING_[Type]] = PcdObj
@ -455,10 +455,10 @@ class DecBuildData(PackageBuildClassObject):
for pcd in Pcds.values(): for pcd in Pcds.values():
if pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]: if pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
if StructPattern.match(pcd.DatumType) is None: if StructPattern.match(pcd.DatumType) is None:
EdkLogger.error('build', FORMAT_INVALID, "DatumType only support BOOLEAN, UINT8, UINT16, UINT32, UINT64, VOID* or a valid struct name.", pcd.DefinitionPosition[0],pcd.DefinitionPosition[1]) EdkLogger.error('build', FORMAT_INVALID, "DatumType only support BOOLEAN, UINT8, UINT16, UINT32, UINT64, VOID* or a valid struct name.", pcd.DefinitionPosition[0], pcd.DefinitionPosition[1])
for struct_pcd in Pcds.values(): for struct_pcd in Pcds.values():
if isinstance(struct_pcd,StructurePcd) and not struct_pcd.StructuredPcdIncludeFile: if isinstance(struct_pcd, StructurePcd) and not struct_pcd.StructuredPcdIncludeFile:
EdkLogger.error("build", PCD_STRUCTURE_PCD_ERROR, "The structure Pcd %s.%s header file is not found in %s line %s \n" % (struct_pcd.TokenSpaceGuidCName, struct_pcd.TokenCName,struct_pcd.DefinitionPosition[0],struct_pcd.DefinitionPosition[1] )) EdkLogger.error("build", PCD_STRUCTURE_PCD_ERROR, "The structure Pcd %s.%s header file is not found in %s line %s \n" % (struct_pcd.TokenSpaceGuidCName, struct_pcd.TokenCName, struct_pcd.DefinitionPosition[0], struct_pcd.DefinitionPosition[1] ))
return Pcds return Pcds
@property @property


@ -40,7 +40,7 @@ import Common.GlobalData as GlobalData
import subprocess import subprocess
from Common.Misc import SaveFileOnChange from Common.Misc import SaveFileOnChange
from Workspace.BuildClassObject import PlatformBuildClassObject, StructurePcd, PcdClassObject, ModuleBuildClassObject from Workspace.BuildClassObject import PlatformBuildClassObject, StructurePcd, PcdClassObject, ModuleBuildClassObject
from collections import OrderedDict,defaultdict from collections import OrderedDict, defaultdict
PcdValueInitName = 'PcdValueInit' PcdValueInitName = 'PcdValueInit'
@ -108,7 +108,7 @@ from AutoGen.GenMake import gIncludePattern
# #
# @retval list The list of files the given source file depends on # @retval list The list of files the given source file depends on
# #
def GetDependencyList(FileStack,SearchPathList): def GetDependencyList(FileStack, SearchPathList):
DepDb = dict() DepDb = dict()
DependencySet = set(FileStack) DependencySet = set(FileStack)
while len(FileStack) > 0: while len(FileStack) > 0:
@ -224,7 +224,7 @@ class DscBuildData(PlatformBuildClassObject):
@property @property
def OutputPath(self): def OutputPath(self):
if os.getenv("WORKSPACE"): if os.getenv("WORKSPACE"):
return os.path.join(os.getenv("WORKSPACE"), self.OutputDirectory, self._Target + "_" + self._Toolchain,PcdValueInitName) return os.path.join(os.getenv("WORKSPACE"), self.OutputDirectory, self._Target + "_" + self._Toolchain, PcdValueInitName)
else: else:
return os.path.dirname(self.DscFile) return os.path.dirname(self.DscFile)
@ -657,7 +657,7 @@ class DscBuildData(PlatformBuildClassObject):
@staticmethod @staticmethod
def ToInt(intstr): def ToInt(intstr):
return int(intstr,16) if intstr.upper().startswith("0X") else int(intstr) return int(intstr, 16) if intstr.upper().startswith("0X") else int(intstr)
def _GetDefaultStores(self): def _GetDefaultStores(self):
if self.DefaultStores is None: if self.DefaultStores is None:
@ -676,9 +676,9 @@ class DscBuildData(PlatformBuildClassObject):
if not IsValidWord(Record[1]): if not IsValidWord(Record[1]):
EdkLogger.error('build', FORMAT_INVALID, "The format of the DefaultStores ID name is invalid. The correct format is '(a-zA-Z0-9_)(a-zA-Z0-9_-.)*'", EdkLogger.error('build', FORMAT_INVALID, "The format of the DefaultStores ID name is invalid. The correct format is '(a-zA-Z0-9_)(a-zA-Z0-9_-.)*'",
File=self.MetaFile, Line=Record[-1]) File=self.MetaFile, Line=Record[-1])
self.DefaultStores[Record[1].upper()] = (DscBuildData.ToInt(Record[0]),Record[1].upper()) self.DefaultStores[Record[1].upper()] = (DscBuildData.ToInt(Record[0]), Record[1].upper())
if TAB_DEFAULT_STORES_DEFAULT not in self.DefaultStores: if TAB_DEFAULT_STORES_DEFAULT not in self.DefaultStores:
self.DefaultStores[TAB_DEFAULT_STORES_DEFAULT] = (0,TAB_DEFAULT_STORES_DEFAULT) self.DefaultStores[TAB_DEFAULT_STORES_DEFAULT] = (0, TAB_DEFAULT_STORES_DEFAULT)
GlobalData.gDefaultStores = sorted(self.DefaultStores.keys()) GlobalData.gDefaultStores = sorted(self.DefaultStores.keys())
return self.DefaultStores return self.DefaultStores
@ -736,7 +736,7 @@ class DscBuildData(PlatformBuildClassObject):
for Type in [MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, \ for Type in [MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, \
MODEL_PCD_FEATURE_FLAG, MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]: MODEL_PCD_FEATURE_FLAG, MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]:
RecordList = self._RawData[Type, self._Arch, None, ModuleId] RecordList = self._RawData[Type, self._Arch, None, ModuleId]
for TokenSpaceGuid, PcdCName, Setting, Dummy1, Dummy2, Dummy3, Dummy4,Dummy5 in RecordList: for TokenSpaceGuid, PcdCName, Setting, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
TokenList = GetSplitValueList(Setting) TokenList = GetSplitValueList(Setting)
DefaultValue = TokenList[0] DefaultValue = TokenList[0]
# the format is PcdName| Value | VOID* | MaxDatumSize # the format is PcdName| Value | VOID* | MaxDatumSize
@ -761,7 +761,7 @@ class DscBuildData(PlatformBuildClassObject):
# get module private build options # get module private build options
RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, None, ModuleId] RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, None, ModuleId]
for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4,Dummy5 in RecordList: for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
if (ToolChainFamily, ToolChain) not in Module.BuildOptions: if (ToolChainFamily, ToolChain) not in Module.BuildOptions:
Module.BuildOptions[ToolChainFamily, ToolChain] = Option Module.BuildOptions[ToolChainFamily, ToolChain] = Option
else: else:
@ -801,7 +801,7 @@ class DscBuildData(PlatformBuildClassObject):
RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, None, -1] RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, None, -1]
Macros = self._Macros Macros = self._Macros
for Record in RecordList: for Record in RecordList:
LibraryClass, LibraryInstance, Dummy, Arch, ModuleType, Dummy,Dummy, LineNo = Record LibraryClass, LibraryInstance, Dummy, Arch, ModuleType, Dummy, Dummy, LineNo = Record
if LibraryClass == '' or LibraryClass == 'NULL': if LibraryClass == '' or LibraryClass == 'NULL':
self._NullLibraryNumber += 1 self._NullLibraryNumber += 1
LibraryClass = 'NULL%d' % self._NullLibraryNumber LibraryClass = 'NULL%d' % self._NullLibraryNumber
@ -868,7 +868,7 @@ class DscBuildData(PlatformBuildClassObject):
ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain] ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
PkgSet.update(ModuleData.Packages) PkgSet.update(ModuleData.Packages)
self._DecPcds, self._GuidDict = GetDeclaredPcd(self, self._Bdb, self._Arch, self._Target, self._Toolchain,PkgSet) self._DecPcds, self._GuidDict = GetDeclaredPcd(self, self._Bdb, self._Arch, self._Target, self._Toolchain, PkgSet)
self._GuidDict.update(GlobalData.gPlatformPcds) self._GuidDict.update(GlobalData.gPlatformPcds)
if (PcdCName, TokenSpaceGuid) not in self._DecPcds: if (PcdCName, TokenSpaceGuid) not in self._DecPcds:
@ -913,14 +913,14 @@ class DscBuildData(PlatformBuildClassObject):
ExtraData="%s.%s" % (TokenSpaceGuid, PcdCName)) ExtraData="%s.%s" % (TokenSpaceGuid, PcdCName))
if PcdType in (MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT): if PcdType in (MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT):
if self._DecPcds[PcdCName, TokenSpaceGuid].DatumType.strip() != ValueList[1].strip(): if self._DecPcds[PcdCName, TokenSpaceGuid].DatumType.strip() != ValueList[1].strip():
EdkLogger.error('build', FORMAT_INVALID, "Pcd datumtype used in DSC file is not the same as its declaration in DEC file." , File=self.MetaFile, Line=LineNo, EdkLogger.error('build', FORMAT_INVALID, "Pcd datumtype used in DSC file is not the same as its declaration in DEC file.", File=self.MetaFile, Line=LineNo,
ExtraData="%s.%s|%s" % (TokenSpaceGuid, PcdCName, Setting)) ExtraData="%s.%s|%s" % (TokenSpaceGuid, PcdCName, Setting))
if (TokenSpaceGuid + '.' + PcdCName) in GlobalData.gPlatformPcds: if (TokenSpaceGuid + '.' + PcdCName) in GlobalData.gPlatformPcds:
if GlobalData.gPlatformPcds[TokenSpaceGuid + '.' + PcdCName] != ValueList[Index]: if GlobalData.gPlatformPcds[TokenSpaceGuid + '.' + PcdCName] != ValueList[Index]:
GlobalData.gPlatformPcds[TokenSpaceGuid + '.' + PcdCName] = ValueList[Index] GlobalData.gPlatformPcds[TokenSpaceGuid + '.' + PcdCName] = ValueList[Index]
return ValueList return ValueList
def _FilterPcdBySkuUsage(self,Pcds): def _FilterPcdBySkuUsage(self, Pcds):
available_sku = self.SkuIdMgr.AvailableSkuIdSet available_sku = self.SkuIdMgr.AvailableSkuIdSet
sku_usage = self.SkuIdMgr.SkuUsageType sku_usage = self.SkuIdMgr.SkuUsageType
if sku_usage == SkuClass.SINGLE: if sku_usage == SkuClass.SINGLE:
@ -936,7 +936,7 @@ class DscBuildData(PlatformBuildClassObject):
if type(pcd) is StructurePcd and pcd.SkuOverrideValues: if type(pcd) is StructurePcd and pcd.SkuOverrideValues:
Pcds[pcdname].SkuOverrideValues = {skuid:pcd.SkuOverrideValues[skuid] for skuid in pcd.SkuOverrideValues if skuid in available_sku} Pcds[pcdname].SkuOverrideValues = {skuid:pcd.SkuOverrideValues[skuid] for skuid in pcd.SkuOverrideValues if skuid in available_sku}
return Pcds return Pcds
def CompleteHiiPcdsDefaultStores(self,Pcds): def CompleteHiiPcdsDefaultStores(self, Pcds):
HiiPcd = [Pcds[pcd] for pcd in Pcds if Pcds[pcd].Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]] HiiPcd = [Pcds[pcd] for pcd in Pcds if Pcds[pcd].Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]]
DefaultStoreMgr = DefaultStore(self.DefaultStores) DefaultStoreMgr = DefaultStore(self.DefaultStores)
for pcd in HiiPcd: for pcd in HiiPcd:
@ -958,7 +958,7 @@ class DscBuildData(PlatformBuildClassObject):
else: else:
pcd.PcdValueFromComm = pcd.SkuInfoList.get(TAB_DEFAULT).DefaultValue pcd.PcdValueFromComm = pcd.SkuInfoList.get(TAB_DEFAULT).DefaultValue
for pcd in self._Pcds: for pcd in self._Pcds:
if isinstance(self._Pcds[pcd],StructurePcd) and (self._Pcds[pcd].PcdValueFromComm or self._Pcds[pcd].PcdFieldValueFromComm): if isinstance(self._Pcds[pcd], StructurePcd) and (self._Pcds[pcd].PcdValueFromComm or self._Pcds[pcd].PcdFieldValueFromComm):
UpdateCommandLineValue(self._Pcds[pcd]) UpdateCommandLineValue(self._Pcds[pcd])
def __ParsePcdFromCommandLine(self): def __ParsePcdFromCommandLine(self):
@ -970,10 +970,10 @@ class DscBuildData(PlatformBuildClassObject):
if not pcdvalue: if not pcdvalue:
EdkLogger.error('build', AUTOGEN_ERROR, "No Value specified for the PCD %s." % (pcdname)) EdkLogger.error('build', AUTOGEN_ERROR, "No Value specified for the PCD %s." % (pcdname))
if '.' in pcdname: if '.' in pcdname:
(Name1, Name2) = pcdname.split('.',1) (Name1, Name2) = pcdname.split('.', 1)
if "." in Name2: if "." in Name2:
(Name3, FieldName) = Name2.split(".",1) (Name3, FieldName) = Name2.split(".", 1)
if ((Name3,Name1)) in self.DecPcds: if ((Name3, Name1)) in self.DecPcds:
HasTokenSpace = True HasTokenSpace = True
TokenCName = Name3 TokenCName = Name3
TokenSpaceGuidCName = Name1 TokenSpaceGuidCName = Name1
@ -983,7 +983,7 @@ class DscBuildData(PlatformBuildClassObject):
TokenSpaceGuidCName = '' TokenSpaceGuidCName = ''
HasTokenSpace = False HasTokenSpace = False
else: else:
if ((Name2,Name1)) in self.DecPcds: if ((Name2, Name1)) in self.DecPcds:
HasTokenSpace = True HasTokenSpace = True
TokenCName = Name2 TokenCName = Name2
TokenSpaceGuidCName = Name1 TokenSpaceGuidCName = Name1
@ -1037,7 +1037,7 @@ class DscBuildData(PlatformBuildClassObject):
IsValid, Cause = CheckPcdDatum(PcdDatumType, pcdvalue) IsValid, Cause = CheckPcdDatum(PcdDatumType, pcdvalue)
if not IsValid: if not IsValid:
EdkLogger.error("build", FORMAT_INVALID, Cause, ExtraData="%s.%s" % (TokenSpaceGuidCName, TokenCName)) EdkLogger.error("build", FORMAT_INVALID, Cause, ExtraData="%s.%s" % (TokenSpaceGuidCName, TokenCName))
GlobalData.BuildOptionPcd[i] = (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue,("build command options",1)) GlobalData.BuildOptionPcd[i] = (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, ("build command options", 1))
for BuildData in self._Bdb._CACHE_.values(): for BuildData in self._Bdb._CACHE_.values():
if BuildData.MetaFile.Ext == '.dec' or BuildData.MetaFile.Ext == '.dsc': if BuildData.MetaFile.Ext == '.dec' or BuildData.MetaFile.Ext == '.dsc':
@ -1148,7 +1148,7 @@ class DscBuildData(PlatformBuildClassObject):
# #
for CodeBase in (EDKII_NAME, EDK_NAME): for CodeBase in (EDKII_NAME, EDK_NAME):
RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, CodeBase] RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, CodeBase]
for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4,Dummy5 in RecordList: for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
if Dummy3.upper() != TAB_COMMON: if Dummy3.upper() != TAB_COMMON:
continue continue
CurKey = (ToolChainFamily, ToolChain, CodeBase) CurKey = (ToolChainFamily, ToolChain, CodeBase)
@ -1171,7 +1171,7 @@ class DscBuildData(PlatformBuildClassObject):
DriverType = '%s.%s' % (Edk, ModuleType) DriverType = '%s.%s' % (Edk, ModuleType)
CommonDriverType = '%s.%s' % (TAB_COMMON, ModuleType) CommonDriverType = '%s.%s' % (TAB_COMMON, ModuleType)
RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch] RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch]
for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4,Dummy5 in RecordList: for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
Type = Dummy2 + '.' + Dummy3 Type = Dummy2 + '.' + Dummy3
if Type.upper() == DriverType.upper() or Type.upper() == CommonDriverType.upper(): if Type.upper() == DriverType.upper() or Type.upper() == CommonDriverType.upper():
Key = (ToolChainFamily, ToolChain, Edk) Key = (ToolChainFamily, ToolChain, Edk)
@ -1186,7 +1186,7 @@ class DscBuildData(PlatformBuildClassObject):
def GetStructurePcdInfo(PcdSet): def GetStructurePcdInfo(PcdSet):
structure_pcd_data = defaultdict(list) structure_pcd_data = defaultdict(list)
for item in PcdSet: for item in PcdSet:
structure_pcd_data[(item[0],item[1])].append(item) structure_pcd_data[(item[0], item[1])].append(item)
return structure_pcd_data return structure_pcd_data
@ -1194,25 +1194,25 @@ class DscBuildData(PlatformBuildClassObject):
def OverrideByFdfComm(StruPcds): def OverrideByFdfComm(StruPcds):
StructurePcdInCom = OrderedDict() StructurePcdInCom = OrderedDict()
for item in GlobalData.BuildOptionPcd: for item in GlobalData.BuildOptionPcd:
if len(item) == 5 and (item[1],item[0]) in StruPcds: if len(item) == 5 and (item[1], item[0]) in StruPcds:
StructurePcdInCom[(item[0],item[1],item[2] )] = (item[3],item[4]) StructurePcdInCom[(item[0], item[1], item[2] )] = (item[3], item[4])
GlobalPcds = {(item[0],item[1]) for item in StructurePcdInCom} GlobalPcds = {(item[0], item[1]) for item in StructurePcdInCom}
for Pcd in StruPcds.values(): for Pcd in StruPcds.values():
if (Pcd.TokenSpaceGuidCName,Pcd.TokenCName) not in GlobalPcds: if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) not in GlobalPcds:
continue continue
FieldValues = OrderedDict() FieldValues = OrderedDict()
for item in StructurePcdInCom: for item in StructurePcdInCom:
if (Pcd.TokenSpaceGuidCName,Pcd.TokenCName) == (item[0],item[1]) and item[2]: if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) == (item[0], item[1]) and item[2]:
FieldValues[item[2]] = StructurePcdInCom[item] FieldValues[item[2]] = StructurePcdInCom[item]
for field in FieldValues: for field in FieldValues:
if field not in Pcd.PcdFieldValueFromComm: if field not in Pcd.PcdFieldValueFromComm:
Pcd.PcdFieldValueFromComm[field] = ["","",""] Pcd.PcdFieldValueFromComm[field] = ["", "", ""]
Pcd.PcdFieldValueFromComm[field][0] = FieldValues[field][0] Pcd.PcdFieldValueFromComm[field][0] = FieldValues[field][0]
Pcd.PcdFieldValueFromComm[field][1] = FieldValues[field][1][0] Pcd.PcdFieldValueFromComm[field][1] = FieldValues[field][1][0]
Pcd.PcdFieldValueFromComm[field][2] = FieldValues[field][1][1] Pcd.PcdFieldValueFromComm[field][2] = FieldValues[field][1][1]
return StruPcds return StruPcds
def OverrideByFdfCommOverAll(self,AllPcds): def OverrideByFdfCommOverAll(self, AllPcds):
def CheckStructureInComm(commpcds): def CheckStructureInComm(commpcds):
if not commpcds: if not commpcds:
return False return False
@ -1221,43 +1221,43 @@ class DscBuildData(PlatformBuildClassObject):
return False return False
if CheckStructureInComm(GlobalData.BuildOptionPcd): if CheckStructureInComm(GlobalData.BuildOptionPcd):
StructurePcdInCom = {(item[0],item[1],item[2] ):(item[3],item[4]) for item in GlobalData.BuildOptionPcd } if GlobalData.BuildOptionPcd else {} StructurePcdInCom = {(item[0], item[1], item[2] ):(item[3], item[4]) for item in GlobalData.BuildOptionPcd } if GlobalData.BuildOptionPcd else {}
NoFiledValues = {(item[0],item[1]):StructurePcdInCom[item] for item in StructurePcdInCom if not item[2]} NoFiledValues = {(item[0], item[1]):StructurePcdInCom[item] for item in StructurePcdInCom if not item[2]}
else: else:
NoFiledValues = {(item[0],item[1]):[item[2]] for item in GlobalData.BuildOptionPcd} NoFiledValues = {(item[0], item[1]):[item[2]] for item in GlobalData.BuildOptionPcd}
for Guid,Name in NoFiledValues: for Guid, Name in NoFiledValues:
if (Name,Guid) in AllPcds: if (Name, Guid) in AllPcds:
Pcd = AllPcds.get((Name,Guid)) Pcd = AllPcds.get((Name, Guid))
if isinstance(self._DecPcds.get((Pcd.TokenCName,Pcd.TokenSpaceGuidCName), None),StructurePcd): if isinstance(self._DecPcds.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName), None), StructurePcd):
self._DecPcds.get((Pcd.TokenCName,Pcd.TokenSpaceGuidCName)).PcdValueFromComm = NoFiledValues[(Pcd.TokenSpaceGuidCName,Pcd.TokenCName)][0] self._DecPcds.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName)).PcdValueFromComm = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
else: else:
Pcd.PcdValueFromComm = NoFiledValues[(Pcd.TokenSpaceGuidCName,Pcd.TokenCName)][0] Pcd.PcdValueFromComm = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
Pcd.DefaultValue = NoFiledValues[(Pcd.TokenSpaceGuidCName,Pcd.TokenCName)][0] Pcd.DefaultValue = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
for sku in Pcd.SkuInfoList: for sku in Pcd.SkuInfoList:
SkuInfo = Pcd.SkuInfoList[sku] SkuInfo = Pcd.SkuInfoList[sku]
if SkuInfo.DefaultValue: if SkuInfo.DefaultValue:
SkuInfo.DefaultValue = NoFiledValues[(Pcd.TokenSpaceGuidCName,Pcd.TokenCName)][0] SkuInfo.DefaultValue = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
else: else:
SkuInfo.HiiDefaultValue = NoFiledValues[(Pcd.TokenSpaceGuidCName,Pcd.TokenCName)][0] SkuInfo.HiiDefaultValue = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
for defaultstore in SkuInfo.DefaultStoreDict: for defaultstore in SkuInfo.DefaultStoreDict:
SkuInfo.DefaultStoreDict[defaultstore] = NoFiledValues[(Pcd.TokenSpaceGuidCName,Pcd.TokenCName)][0] SkuInfo.DefaultStoreDict[defaultstore] = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
if Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII]]: if Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII]]:
if Pcd.DatumType == TAB_VOID: if Pcd.DatumType == TAB_VOID:
if not Pcd.MaxDatumSize: if not Pcd.MaxDatumSize:
Pcd.MaxDatumSize = '0' Pcd.MaxDatumSize = '0'
CurrentSize = int(Pcd.MaxDatumSize,16) if Pcd.MaxDatumSize.upper().startswith("0X") else int(Pcd.MaxDatumSize) CurrentSize = int(Pcd.MaxDatumSize, 16) if Pcd.MaxDatumSize.upper().startswith("0X") else int(Pcd.MaxDatumSize)
OptionSize = len((StringToArray(Pcd.PcdValueFromComm)).split(",")) OptionSize = len((StringToArray(Pcd.PcdValueFromComm)).split(","))
MaxSize = max(CurrentSize, OptionSize) MaxSize = max(CurrentSize, OptionSize)
Pcd.MaxDatumSize = str(MaxSize) Pcd.MaxDatumSize = str(MaxSize)
else: else:
PcdInDec = self.DecPcds.get((Name,Guid)) PcdInDec = self.DecPcds.get((Name, Guid))
if PcdInDec: if PcdInDec:
PcdInDec.PcdValueFromComm = NoFiledValues[(Guid,Name)][0] PcdInDec.PcdValueFromComm = NoFiledValues[(Guid, Name)][0]
if PcdInDec.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD], if PcdInDec.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE], self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE],
self._PCD_TYPE_STRING_[MODEL_PCD_FEATURE_FLAG]]: self._PCD_TYPE_STRING_[MODEL_PCD_FEATURE_FLAG]]:
self.Pcds[Name, Guid] = copy.deepcopy(PcdInDec) self.Pcds[Name, Guid] = copy.deepcopy(PcdInDec)
self.Pcds[Name, Guid].DefaultValue = NoFiledValues[( Guid,Name)][0] self.Pcds[Name, Guid].DefaultValue = NoFiledValues[( Guid, Name)][0]
return AllPcds return AllPcds
def UpdateStructuredPcds(self, TypeList, AllPcds): def UpdateStructuredPcds(self, TypeList, AllPcds):
@ -1281,7 +1281,7 @@ class DscBuildData(PlatformBuildClassObject):
for Type in TypeList: for Type in TypeList:
RecordList.extend(self._RawData[Type, self._Arch]) RecordList.extend(self._RawData[Type, self._Arch])
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, default_store, Dummy4,Dummy5 in RecordList: for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, default_store, Dummy4, Dummy5 in RecordList:
SkuName = SkuName.upper() SkuName = SkuName.upper()
default_store = default_store.upper() default_store = default_store.upper()
SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
@ -1289,7 +1289,7 @@ class DscBuildData(PlatformBuildClassObject):
continue continue
if SkuName in SkuIds and "." in TokenSpaceGuid: if SkuName in SkuIds and "." in TokenSpaceGuid:
S_PcdSet.append([ TokenSpaceGuid.split(".")[0],TokenSpaceGuid.split(".")[1], PcdCName,SkuName, default_store,Dummy5, AnalyzePcdExpression(Setting)[0]]) S_PcdSet.append([ TokenSpaceGuid.split(".")[0], TokenSpaceGuid.split(".")[1], PcdCName, SkuName, default_store, Dummy5, AnalyzePcdExpression(Setting)[0]])
# handle pcd value override # handle pcd value override
StrPcdSet = DscBuildData.GetStructurePcdInfo(S_PcdSet) StrPcdSet = DscBuildData.GetStructurePcdInfo(S_PcdSet)
@ -1300,7 +1300,7 @@ class DscBuildData(PlatformBuildClassObject):
if not isinstance (str_pcd_dec, StructurePcd): if not isinstance (str_pcd_dec, StructurePcd):
EdkLogger.error('build', PARSER_ERROR, EdkLogger.error('build', PARSER_ERROR,
"Pcd (%s.%s) is not declared as Structure PCD in DEC files. Arch: ['%s']" % (str_pcd[0], str_pcd[1], self._Arch), "Pcd (%s.%s) is not declared as Structure PCD in DEC files. Arch: ['%s']" % (str_pcd[0], str_pcd[1], self._Arch),
File=self.MetaFile,Line = StrPcdSet[str_pcd][0][5]) File=self.MetaFile, Line = StrPcdSet[str_pcd][0][5])
if str_pcd_dec: if str_pcd_dec:
str_pcd_obj_str = StructurePcd() str_pcd_obj_str = StructurePcd()
str_pcd_obj_str.copy(str_pcd_dec) str_pcd_obj_str.copy(str_pcd_dec)
@ -1312,12 +1312,12 @@ class DscBuildData(PlatformBuildClassObject):
str_pcd_obj_str.DefaultFromDSC = {skuname:{defaultstore: str_pcd_obj.SkuInfoList[skuname].DefaultStoreDict.get(defaultstore, str_pcd_obj.SkuInfoList[skuname].DefaultValue) for defaultstore in DefaultStores} for skuname in str_pcd_obj.SkuInfoList} str_pcd_obj_str.DefaultFromDSC = {skuname:{defaultstore: str_pcd_obj.SkuInfoList[skuname].DefaultStoreDict.get(defaultstore, str_pcd_obj.SkuInfoList[skuname].DefaultValue) for defaultstore in DefaultStores} for skuname in str_pcd_obj.SkuInfoList}
for str_pcd_data in StrPcdSet[str_pcd]: for str_pcd_data in StrPcdSet[str_pcd]:
if str_pcd_data[3] in SkuIds: if str_pcd_data[3] in SkuIds:
str_pcd_obj_str.AddOverrideValue(str_pcd_data[2], str(str_pcd_data[6]), TAB_DEFAULT if str_pcd_data[3] == TAB_COMMON else str_pcd_data[3],TAB_DEFAULT_STORES_DEFAULT if str_pcd_data[4] == TAB_COMMON else str_pcd_data[4], self.MetaFile.File if self.WorkspaceDir not in self.MetaFile.File else self.MetaFile.File[len(self.WorkspaceDir) if self.WorkspaceDir.endswith(os.path.sep) else len(self.WorkspaceDir)+1:],LineNo=str_pcd_data[5]) str_pcd_obj_str.AddOverrideValue(str_pcd_data[2], str(str_pcd_data[6]), TAB_DEFAULT if str_pcd_data[3] == TAB_COMMON else str_pcd_data[3], TAB_DEFAULT_STORES_DEFAULT if str_pcd_data[4] == TAB_COMMON else str_pcd_data[4], self.MetaFile.File if self.WorkspaceDir not in self.MetaFile.File else self.MetaFile.File[len(self.WorkspaceDir) if self.WorkspaceDir.endswith(os.path.sep) else len(self.WorkspaceDir)+1:], LineNo=str_pcd_data[5])
S_pcd_set[str_pcd[1], str_pcd[0]] = str_pcd_obj_str S_pcd_set[str_pcd[1], str_pcd[0]] = str_pcd_obj_str
else: else:
EdkLogger.error('build', PARSER_ERROR, EdkLogger.error('build', PARSER_ERROR,
"Pcd (%s.%s) defined in DSC is not declared in DEC files. Arch: ['%s']" % (str_pcd[0], str_pcd[1], self._Arch), "Pcd (%s.%s) defined in DSC is not declared in DEC files. Arch: ['%s']" % (str_pcd[0], str_pcd[1], self._Arch),
File=self.MetaFile,Line = StrPcdSet[str_pcd][0][5]) File=self.MetaFile, Line = StrPcdSet[str_pcd][0][5])
# Add the Structure PCD that only defined in DEC, don't have override in DSC file # Add the Structure PCD that only defined in DEC, don't have override in DSC file
for Pcd in self.DecPcds: for Pcd in self.DecPcds:
if type (self._DecPcds[Pcd]) is StructurePcd: if type (self._DecPcds[Pcd]) is StructurePcd:
@ -1348,7 +1348,7 @@ class DscBuildData(PlatformBuildClassObject):
nextskuid = self.SkuIdMgr.GetNextSkuId(nextskuid) nextskuid = self.SkuIdMgr.GetNextSkuId(nextskuid)
stru_pcd.SkuOverrideValues[skuid] = copy.deepcopy(stru_pcd.SkuOverrideValues[nextskuid]) if not NoDefault else copy.deepcopy({defaultstorename: stru_pcd.DefaultValues for defaultstorename in DefaultStores} if DefaultStores else {TAB_DEFAULT_STORES_DEFAULT:stru_pcd.DefaultValues}) stru_pcd.SkuOverrideValues[skuid] = copy.deepcopy(stru_pcd.SkuOverrideValues[nextskuid]) if not NoDefault else copy.deepcopy({defaultstorename: stru_pcd.DefaultValues for defaultstorename in DefaultStores} if DefaultStores else {TAB_DEFAULT_STORES_DEFAULT:stru_pcd.DefaultValues})
if not NoDefault: if not NoDefault:
stru_pcd.ValueChain.add((skuid,'')) stru_pcd.ValueChain.add((skuid, ''))
if stru_pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]: if stru_pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
for skuid in SkuIds: for skuid in SkuIds:
nextskuid = skuid nextskuid = skuid
@ -1367,11 +1367,11 @@ class DscBuildData(PlatformBuildClassObject):
for defaultstoreid in DefaultStores: for defaultstoreid in DefaultStores:
if defaultstoreid not in stru_pcd.SkuOverrideValues[skuid]: if defaultstoreid not in stru_pcd.SkuOverrideValues[skuid]:
stru_pcd.SkuOverrideValues[skuid][defaultstoreid] = copy.deepcopy(stru_pcd.SkuOverrideValues[nextskuid][mindefaultstorename]) stru_pcd.SkuOverrideValues[skuid][defaultstoreid] = copy.deepcopy(stru_pcd.SkuOverrideValues[nextskuid][mindefaultstorename])
stru_pcd.ValueChain.add((skuid,defaultstoreid)) stru_pcd.ValueChain.add((skuid, defaultstoreid))
S_pcd_set = DscBuildData.OverrideByFdfComm(S_pcd_set) S_pcd_set = DscBuildData.OverrideByFdfComm(S_pcd_set)
Str_Pcd_Values = self.GenerateByteArrayValue(S_pcd_set) Str_Pcd_Values = self.GenerateByteArrayValue(S_pcd_set)
if Str_Pcd_Values: if Str_Pcd_Values:
for (skuname,StoreName,PcdGuid,PcdName,PcdValue) in Str_Pcd_Values: for (skuname, StoreName, PcdGuid, PcdName, PcdValue) in Str_Pcd_Values:
str_pcd_obj = S_pcd_set.get((PcdName, PcdGuid)) str_pcd_obj = S_pcd_set.get((PcdName, PcdGuid))
if str_pcd_obj is None: if str_pcd_obj is None:
print(PcdName, PcdGuid) print(PcdName, PcdGuid)
@ -1423,7 +1423,7 @@ class DscBuildData(PlatformBuildClassObject):
elif TAB_DEFAULT in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList: elif TAB_DEFAULT in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
del pcd.SkuInfoList[TAB_COMMON] del pcd.SkuInfoList[TAB_COMMON]
map(self.FilterSkuSettings,[Pcds[pcdkey] for pcdkey in Pcds if Pcds[pcdkey].Type in DynamicPcdType]) map(self.FilterSkuSettings, [Pcds[pcdkey] for pcdkey in Pcds if Pcds[pcdkey].Type in DynamicPcdType])
return Pcds return Pcds
## Retrieve non-dynamic PCD settings ## Retrieve non-dynamic PCD settings
@ -1445,7 +1445,7 @@ class DscBuildData(PlatformBuildClassObject):
# Find out all possible PCD candidates for self._Arch # Find out all possible PCD candidates for self._Arch
RecordList = self._RawData[Type, self._Arch] RecordList = self._RawData[Type, self._Arch]
PcdValueDict = OrderedDict() PcdValueDict = OrderedDict()
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4,Dummy5 in RecordList: for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4, Dummy5 in RecordList:
SkuName = SkuName.upper() SkuName = SkuName.upper()
SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
if SkuName not in AvailableSkuIdSet: if SkuName not in AvailableSkuIdSet:
@ -1466,7 +1466,7 @@ class DscBuildData(PlatformBuildClassObject):
else: else:
PcdValueDict[PcdCName, TokenSpaceGuid] = {SkuName:(PcdValue, DatumType, MaxDatumSize)} PcdValueDict[PcdCName, TokenSpaceGuid] = {SkuName:(PcdValue, DatumType, MaxDatumSize)}
for ((PcdCName,TokenSpaceGuid),PcdSetting) in PcdValueDict.iteritems(): for ((PcdCName, TokenSpaceGuid), PcdSetting) in PcdValueDict.iteritems():
PcdValue = None PcdValue = None
DatumType = None DatumType = None
MaxDatumSize = None MaxDatumSize = None
@ -1536,7 +1536,7 @@ class DscBuildData(PlatformBuildClassObject):
Result = Result + '"' Result = Result + '"'
return Result return Result
def GenerateSizeFunction(self,Pcd): def GenerateSizeFunction(self, Pcd):
CApp = "// Default Value in Dec \n" CApp = "// Default Value in Dec \n"
CApp = CApp + "void Cal_%s_%s_Size(UINT32 *Size){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) CApp = CApp + "void Cal_%s_%s_Size(UINT32 *Size){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
for FieldList in [Pcd.DefaultValues]: for FieldList in [Pcd.DefaultValues]:
@ -1618,7 +1618,7 @@ class DscBuildData(PlatformBuildClassObject):
while '[' in FieldName: while '[' in FieldName:
FieldName = FieldName.rsplit('[', 1)[0] FieldName = FieldName.rsplit('[', 1)[0]
CApp = CApp + ' __FLEXIBLE_SIZE(*Size, %s, %s, %d); // From %s Line %d Value %s \n' % (Pcd.DatumType, FieldName.strip("."), ArrayIndex + 1, Pcd.PcdFieldValueFromComm[FieldName_ori][1], Pcd.PcdFieldValueFromComm[FieldName_ori][2], Pcd.PcdFieldValueFromComm[FieldName_ori][0]) CApp = CApp + ' __FLEXIBLE_SIZE(*Size, %s, %s, %d); // From %s Line %d Value %s \n' % (Pcd.DatumType, FieldName.strip("."), ArrayIndex + 1, Pcd.PcdFieldValueFromComm[FieldName_ori][1], Pcd.PcdFieldValueFromComm[FieldName_ori][2], Pcd.PcdFieldValueFromComm[FieldName_ori][0])
CApp = CApp + " *Size = (%d > *Size ? %d : *Size); // The Pcd maxsize is %d \n" % (Pcd.GetPcdMaxSize(),Pcd.GetPcdMaxSize(),Pcd.GetPcdMaxSize()) CApp = CApp + " *Size = (%d > *Size ? %d : *Size); // The Pcd maxsize is %d \n" % (Pcd.GetPcdMaxSize(), Pcd.GetPcdMaxSize(), Pcd.GetPcdMaxSize())
CApp = CApp + "}\n" CApp = CApp + "}\n"
return CApp return CApp
@ -1628,9 +1628,9 @@ class DscBuildData(PlatformBuildClassObject):
CApp = CApp + ' Cal_%s_%s_Size(&Size);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) CApp = CApp + ' Cal_%s_%s_Size(&Size);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
return CApp return CApp
def GenerateDefaultValueAssignFunction(self,Pcd): def GenerateDefaultValueAssignFunction(self, Pcd):
CApp = "// Default value in Dec \n" CApp = "// Default value in Dec \n"
CApp = CApp + "void Assign_%s_%s_Default_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType) CApp = CApp + "void Assign_%s_%s_Default_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.DatumType)
CApp = CApp + ' UINT32 FieldSize;\n' CApp = CApp + ' UINT32 FieldSize;\n'
CApp = CApp + ' CHAR8 *Value;\n' CApp = CApp + ' CHAR8 *Value;\n'
DefaultValueFromDec = Pcd.DefaultValueFromDec DefaultValueFromDec = Pcd.DefaultValueFromDec
@ -1661,12 +1661,12 @@ class DscBuildData(PlatformBuildClassObject):
FieldList[FieldName][0] = ValueExpressionEx(FieldList[FieldName][0], TAB_VOID, self._GuidDict)(True) FieldList[FieldName][0] = ValueExpressionEx(FieldList[FieldName][0], TAB_VOID, self._GuidDict)(True)
except BadExpression: except BadExpression:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
(".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1],FieldList[FieldName][2])) (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
try: try:
Value, ValueSize = ParseFieldValue (FieldList[FieldName][0]) Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
except Exception: except Exception:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName,FieldName)),FieldList[FieldName][1], FieldList[FieldName][2])) EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
if isinstance(Value, str): if isinstance(Value, str):
CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0]) CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
elif IsArray: elif IsArray:
@ -1689,22 +1689,22 @@ class DscBuildData(PlatformBuildClassObject):
CApp = ' Assign_%s_%s_Default_Value(Pcd);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) CApp = ' Assign_%s_%s_Default_Value(Pcd);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
return CApp return CApp
def GenerateInitValueFunction(self,Pcd,SkuName,DefaultStoreName): def GenerateInitValueFunction(self, Pcd, SkuName, DefaultStoreName):
CApp = "// Value in Dsc for Sku: %s, DefaultStore %s\n" % (SkuName,DefaultStoreName) CApp = "// Value in Dsc for Sku: %s, DefaultStore %s\n" % (SkuName, DefaultStoreName)
CApp = CApp + "void Assign_%s_%s_%s_%s_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName,DefaultStoreName,Pcd.DatumType) CApp = CApp + "void Assign_%s_%s_%s_%s_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, SkuName, DefaultStoreName, Pcd.DatumType)
CApp = CApp + ' UINT32 FieldSize;\n' CApp = CApp + ' UINT32 FieldSize;\n'
CApp = CApp + ' CHAR8 *Value;\n' CApp = CApp + ' CHAR8 *Value;\n'
CApp = CApp + "// SkuName: %s, DefaultStoreName: %s \n" % (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT) CApp = CApp + "// SkuName: %s, DefaultStoreName: %s \n" % (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT)
inherit_OverrideValues = Pcd.SkuOverrideValues[SkuName] inherit_OverrideValues = Pcd.SkuOverrideValues[SkuName]
if (SkuName,DefaultStoreName) == (TAB_DEFAULT,TAB_DEFAULT_STORES_DEFAULT): if (SkuName, DefaultStoreName) == (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT):
pcddefaultvalue = Pcd.DefaultFromDSC.get(TAB_DEFAULT,{}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue pcddefaultvalue = Pcd.DefaultFromDSC.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue
else: else:
if not Pcd.DscRawValue: if not Pcd.DscRawValue:
# handle the case that structure pcd is not appear in DSC # handle the case that structure pcd is not appear in DSC
self.CopyDscRawValue(Pcd) self.CopyDscRawValue(Pcd)
pcddefaultvalue = Pcd.DscRawValue.get(SkuName,{}).get(DefaultStoreName) pcddefaultvalue = Pcd.DscRawValue.get(SkuName, {}).get(DefaultStoreName)
for FieldList in [pcddefaultvalue,inherit_OverrideValues.get(DefaultStoreName)]: for FieldList in [pcddefaultvalue, inherit_OverrideValues.get(DefaultStoreName)]:
if not FieldList: if not FieldList:
continue continue
if pcddefaultvalue and FieldList == pcddefaultvalue: if pcddefaultvalue and FieldList == pcddefaultvalue:
@ -1717,26 +1717,26 @@ class DscBuildData(PlatformBuildClassObject):
(Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldList)) (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldList))
Value, ValueSize = ParseFieldValue (FieldList) Value, ValueSize = ParseFieldValue (FieldList)
if (SkuName,DefaultStoreName) == (TAB_DEFAULT,TAB_DEFAULT_STORES_DEFAULT): if (SkuName, DefaultStoreName) == (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT):
if isinstance(Value, str): if isinstance(Value, str):
CApp = CApp + ' Pcd = %s; // From DSC Default Value %s\n' % (Value, Pcd.DefaultFromDSC.get(TAB_DEFAULT,{}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue) CApp = CApp + ' Pcd = %s; // From DSC Default Value %s\n' % (Value, Pcd.DefaultFromDSC.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue)
elif IsArray: elif IsArray:
# #
# Use memcpy() to copy value into field # Use memcpy() to copy value into field
# #
CApp = CApp + ' Value = %s; // From DSC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DefaultFromDSC.get(TAB_DEFAULT,{}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue) CApp = CApp + ' Value = %s; // From DSC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DefaultFromDSC.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue)
CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize) CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
else: else:
if isinstance(Value, str): if isinstance(Value, str):
CApp = CApp + ' Pcd = %s; // From DSC Default Value %s\n' % (Value, Pcd.DscRawValue.get(SkuName,{}).get(DefaultStoreName)) CApp = CApp + ' Pcd = %s; // From DSC Default Value %s\n' % (Value, Pcd.DscRawValue.get(SkuName, {}).get(DefaultStoreName))
elif IsArray: elif IsArray:
# #
# Use memcpy() to copy value into field # Use memcpy() to copy value into field
# #
CApp = CApp + ' Value = %s; // From DSC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DscRawValue.get(SkuName,{}).get(DefaultStoreName)) CApp = CApp + ' Value = %s; // From DSC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DscRawValue.get(SkuName, {}).get(DefaultStoreName))
CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize) CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
continue continue
if (SkuName,DefaultStoreName) == (TAB_DEFAULT,TAB_DEFAULT_STORES_DEFAULT) or (( (SkuName,'') not in Pcd.ValueChain) and ( (SkuName,DefaultStoreName) not in Pcd.ValueChain )): if (SkuName, DefaultStoreName) == (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT) or (( (SkuName, '') not in Pcd.ValueChain) and ( (SkuName, DefaultStoreName) not in Pcd.ValueChain )):
for FieldName in FieldList: for FieldName in FieldList:
IsArray = IsFieldValueAnArray(FieldList[FieldName][0]) IsArray = IsFieldValueAnArray(FieldList[FieldName][0])
if IsArray: if IsArray:
@ -1748,7 +1748,7 @@ class DscBuildData(PlatformBuildClassObject):
try: try:
Value, ValueSize = ParseFieldValue (FieldList[FieldName][0]) Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
except Exception: except Exception:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName,FieldName)),FieldList[FieldName][1], FieldList[FieldName][2])) EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
if isinstance(Value, str): if isinstance(Value, str):
CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0]) CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
elif IsArray: elif IsArray:
@ -1767,18 +1767,18 @@ class DscBuildData(PlatformBuildClassObject):
return CApp return CApp
@staticmethod @staticmethod
def GenerateInitValueStatement(Pcd,SkuName,DefaultStoreName): def GenerateInitValueStatement(Pcd, SkuName, DefaultStoreName):
CApp = ' Assign_%s_%s_%s_%s_Value(Pcd);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName,DefaultStoreName) CApp = ' Assign_%s_%s_%s_%s_Value(Pcd);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, SkuName, DefaultStoreName)
return CApp return CApp
def GenerateCommandLineValue(self,Pcd): def GenerateCommandLineValue(self, Pcd):
CApp = "// Value in CommandLine\n" CApp = "// Value in CommandLine\n"
CApp = CApp + "void Assign_%s_%s_CommandLine_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType) CApp = CApp + "void Assign_%s_%s_CommandLine_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.DatumType)
CApp = CApp + ' UINT32 FieldSize;\n' CApp = CApp + ' UINT32 FieldSize;\n'
CApp = CApp + ' CHAR8 *Value;\n' CApp = CApp + ' CHAR8 *Value;\n'
pcddefaultvalue = Pcd.PcdValueFromComm pcddefaultvalue = Pcd.PcdValueFromComm
for FieldList in [pcddefaultvalue,Pcd.PcdFieldValueFromComm]: for FieldList in [pcddefaultvalue, Pcd.PcdFieldValueFromComm]:
if not FieldList: if not FieldList:
continue continue
if pcddefaultvalue and FieldList == pcddefaultvalue: if pcddefaultvalue and FieldList == pcddefaultvalue:
@ -1813,7 +1813,7 @@ class DscBuildData(PlatformBuildClassObject):
try: try:
Value, ValueSize = ParseFieldValue (FieldList[FieldName][0]) Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
except Exception: except Exception:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName,FieldName)),FieldList[FieldName][1], FieldList[FieldName][2])) EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
if isinstance(Value, str): if isinstance(Value, str):
CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0]) CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
elif IsArray: elif IsArray:
@ -1855,7 +1855,7 @@ class DscBuildData(PlatformBuildClassObject):
CApp = CApp + '\n' CApp = CApp + '\n'
if SkuName in Pcd.SkuInfoList: if SkuName in Pcd.SkuInfoList:
DefaultValue = Pcd.SkuInfoList[SkuName].DefaultStoreDict.get(DefaultStoreName,Pcd.SkuInfoList[SkuName].HiiDefaultValue if Pcd.SkuInfoList[SkuName].HiiDefaultValue else Pcd.SkuInfoList[SkuName].DefaultValue) DefaultValue = Pcd.SkuInfoList[SkuName].DefaultStoreDict.get(DefaultStoreName, Pcd.SkuInfoList[SkuName].HiiDefaultValue if Pcd.SkuInfoList[SkuName].HiiDefaultValue else Pcd.SkuInfoList[SkuName].DefaultValue)
else: else:
DefaultValue = Pcd.DefaultValue DefaultValue = Pcd.DefaultValue
PcdDefaultValue = StringToArray(DefaultValue.strip()) PcdDefaultValue = StringToArray(DefaultValue.strip())
@ -1901,12 +1901,12 @@ class DscBuildData(PlatformBuildClassObject):
storeset = [DefaultStoreName] if DefaultStoreName == TAB_DEFAULT_STORES_DEFAULT else [TAB_DEFAULT_STORES_DEFAULT, DefaultStoreName] storeset = [DefaultStoreName] if DefaultStoreName == TAB_DEFAULT_STORES_DEFAULT else [TAB_DEFAULT_STORES_DEFAULT, DefaultStoreName]
for defaultstorenameitem in storeset: for defaultstorenameitem in storeset:
CApp = CApp + "// SkuName: %s, DefaultStoreName: %s \n" % (skuname, defaultstorenameitem) CApp = CApp + "// SkuName: %s, DefaultStoreName: %s \n" % (skuname, defaultstorenameitem)
CApp = CApp + DscBuildData.GenerateInitValueStatement(Pcd,skuname,defaultstorenameitem) CApp = CApp + DscBuildData.GenerateInitValueStatement(Pcd, skuname, defaultstorenameitem)
if skuname == SkuName: if skuname == SkuName:
break break
else: else:
CApp = CApp + "// SkuName: %s, DefaultStoreName: STANDARD \n" % self.SkuIdMgr.SystemSkuId CApp = CApp + "// SkuName: %s, DefaultStoreName: STANDARD \n" % self.SkuIdMgr.SystemSkuId
CApp = CApp + DscBuildData.GenerateInitValueStatement(Pcd,self.SkuIdMgr.SystemSkuId,TAB_DEFAULT_STORES_DEFAULT) CApp = CApp + DscBuildData.GenerateInitValueStatement(Pcd, self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT)
CApp = CApp + DscBuildData.GenerateCommandLineValueStatement(Pcd) CApp = CApp + DscBuildData.GenerateCommandLineValueStatement(Pcd)
# #
# Set new PCD value and size # Set new PCD value and size
@ -1946,13 +1946,13 @@ class DscBuildData(PlatformBuildClassObject):
CApp = CApp + self.GenerateCommandLineValue(Pcd) CApp = CApp + self.GenerateCommandLineValue(Pcd)
if not Pcd.SkuOverrideValues or Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD], if not Pcd.SkuOverrideValues or Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]: self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
CApp = CApp + self.GenerateInitValueFunction(Pcd,self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT) CApp = CApp + self.GenerateInitValueFunction(Pcd, self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT)
else: else:
for SkuName in self.SkuIdMgr.SkuOverrideOrder(): for SkuName in self.SkuIdMgr.SkuOverrideOrder():
if SkuName not in Pcd.SkuOverrideValues: if SkuName not in Pcd.SkuOverrideValues:
continue continue
for DefaultStoreName in Pcd.SkuOverrideValues[SkuName]: for DefaultStoreName in Pcd.SkuOverrideValues[SkuName]:
CApp = CApp + self.GenerateInitValueFunction(Pcd,SkuName,DefaultStoreName) CApp = CApp + self.GenerateInitValueFunction(Pcd, SkuName, DefaultStoreName)
if not Pcd.SkuOverrideValues or Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD], if not Pcd.SkuOverrideValues or Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]: self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
InitByteValue, CApp = self.GenerateInitializeFunc(self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT, Pcd, InitByteValue, CApp) InitByteValue, CApp = self.GenerateInitializeFunc(self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT, Pcd, InitByteValue, CApp)
@ -1970,7 +1970,7 @@ class DscBuildData(PlatformBuildClassObject):
CApp = CApp + ' )\n' CApp = CApp + ' )\n'
CApp = CApp + '{\n' CApp = CApp + '{\n'
for Pcd in StructuredPcds.values(): for Pcd in StructuredPcds.values():
if not Pcd.SkuOverrideValues or Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]: if not Pcd.SkuOverrideValues or Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD], self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
CApp = CApp + ' Initialize_%s_%s_%s_%s();\n' % (self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT, Pcd.TokenSpaceGuidCName, Pcd.TokenCName) CApp = CApp + ' Initialize_%s_%s_%s_%s();\n' % (self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
else: else:
for SkuName in self.SkuIdMgr.SkuOverrideOrder(): for SkuName in self.SkuIdMgr.SkuOverrideOrder():
@ -2072,7 +2072,7 @@ class DscBuildData(PlatformBuildClassObject):
IncludeFileFullPaths = [] IncludeFileFullPaths = []
for includefile in IncludeFiles: for includefile in IncludeFiles:
for includepath in IncSearchList: for includepath in IncSearchList:
includefullpath = os.path.join(str(includepath),includefile) includefullpath = os.path.join(str(includepath), includefile)
if os.path.exists(includefullpath): if os.path.exists(includefullpath):
IncludeFileFullPaths.append(os.path.normpath(includefullpath)) IncludeFileFullPaths.append(os.path.normpath(includefullpath))
break break
@ -2080,7 +2080,7 @@ class DscBuildData(PlatformBuildClassObject):
SearchPathList.append(os.path.normpath(mws.join(GlobalData.gWorkspace, "BaseTools/Source/C/Include"))) SearchPathList.append(os.path.normpath(mws.join(GlobalData.gWorkspace, "BaseTools/Source/C/Include")))
SearchPathList.append(os.path.normpath(mws.join(GlobalData.gWorkspace, "BaseTools/Source/C/Common"))) SearchPathList.append(os.path.normpath(mws.join(GlobalData.gWorkspace, "BaseTools/Source/C/Common")))
SearchPathList.extend(str(item) for item in IncSearchList) SearchPathList.extend(str(item) for item in IncSearchList)
IncFileList = GetDependencyList(IncludeFileFullPaths,SearchPathList) IncFileList = GetDependencyList(IncludeFileFullPaths, SearchPathList)
for include_file in IncFileList: for include_file in IncFileList:
MakeApp += "$(OBJECTS) : %s\n" % include_file MakeApp += "$(OBJECTS) : %s\n" % include_file
MakeFileName = os.path.join(self.OutputPath, 'Makefile') MakeFileName = os.path.join(self.OutputPath, 'Makefile')
@ -2126,7 +2126,7 @@ class DscBuildData(PlatformBuildClassObject):
if FileLine.isdigit(): if FileLine.isdigit():
error_line = FileData[int (FileLine) - 1] error_line = FileData[int (FileLine) - 1]
if r"//" in error_line: if r"//" in error_line:
c_line,dsc_line = error_line.split(r"//") c_line, dsc_line = error_line.split(r"//")
else: else:
dsc_line = error_line dsc_line = error_line
message_itmes = Message.split(":") message_itmes = Message.split(":")
@ -2150,7 +2150,7 @@ class DscBuildData(PlatformBuildClassObject):
else: else:
EdkLogger.error('Build', COMMAND_FAILURE, 'Can not execute command: %s' % MakeCommand) EdkLogger.error('Build', COMMAND_FAILURE, 'Can not execute command: %s' % MakeCommand)
if DscBuildData.NeedUpdateOutput(OutputValueFile, PcdValueInitExe ,InputValueFile): if DscBuildData.NeedUpdateOutput(OutputValueFile, PcdValueInitExe, InputValueFile):
Command = PcdValueInitExe + ' -i %s -o %s' % (InputValueFile, OutputValueFile) Command = PcdValueInitExe + ' -i %s -o %s' % (InputValueFile, OutputValueFile)
returncode, StdOut, StdErr = DscBuildData.ExecuteCommand (Command) returncode, StdOut, StdErr = DscBuildData.ExecuteCommand (Command)
if returncode != 0: if returncode != 0:
@ -2164,7 +2164,7 @@ class DscBuildData(PlatformBuildClassObject):
for Pcd in FileBuffer: for Pcd in FileBuffer:
PcdValue = Pcd.split ('|') PcdValue = Pcd.split ('|')
PcdInfo = PcdValue[0].split ('.') PcdInfo = PcdValue[0].split ('.')
StructurePcdSet.append((PcdInfo[0],PcdInfo[1], PcdInfo[2], PcdInfo[3], PcdValue[2].strip())) StructurePcdSet.append((PcdInfo[0], PcdInfo[1], PcdInfo[2], PcdInfo[3], PcdValue[2].strip()))
return StructurePcdSet return StructurePcdSet
@staticmethod @staticmethod
@ -2198,7 +2198,7 @@ class DscBuildData(PlatformBuildClassObject):
AvailableSkuIdSet = copy.copy(self.SkuIds) AvailableSkuIdSet = copy.copy(self.SkuIds)
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4,Dummy5 in RecordList: for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4, Dummy5 in RecordList:
SkuName = SkuName.upper() SkuName = SkuName.upper()
SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
if SkuName not in AvailableSkuIdSet: if SkuName not in AvailableSkuIdSet:
@ -2260,7 +2260,7 @@ class DscBuildData(PlatformBuildClassObject):
elif TAB_DEFAULT in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList: elif TAB_DEFAULT in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
del pcd.SkuInfoList[TAB_COMMON] del pcd.SkuInfoList[TAB_COMMON]
map(self.FilterSkuSettings,Pcds.values()) map(self.FilterSkuSettings, Pcds.values())
return Pcds return Pcds
@ -2291,7 +2291,7 @@ class DscBuildData(PlatformBuildClassObject):
else: else:
return False return False
def CopyDscRawValue(self,Pcd): def CopyDscRawValue(self, Pcd):
if Pcd.DscRawValue is None: if Pcd.DscRawValue is None:
Pcd.DscRawValue = dict() Pcd.DscRawValue = dict()
if Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD], self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]: if Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD], self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
@@ -2305,10 +2305,10 @@ class DscBuildData(PlatformBuildClassObject):
 Pcd.DscRawValue[skuname][defaultstore] = Pcd.SkuInfoList[skuname].DefaultStoreDict[defaultstore]
 else:
 Pcd.DscRawValue[skuname][TAB_DEFAULT_STORES_DEFAULT] = Pcd.SkuInfoList[skuname].DefaultValue
-def CompletePcdValues(self,PcdSet):
+def CompletePcdValues(self, PcdSet):
 Pcds = {}
 DefaultStoreObj = DefaultStore(self._GetDefaultStores())
-SkuIds = {skuname:skuid for skuname,skuid in self.SkuIdMgr.AvailableSkuIdSet.items() if skuname != TAB_COMMON}
+SkuIds = {skuname:skuid for skuname, skuid in self.SkuIdMgr.AvailableSkuIdSet.items() if skuname != TAB_COMMON}
 DefaultStores = set(storename for pcdobj in PcdSet.values() for skuobj in pcdobj.SkuInfoList.values() for storename in skuobj.DefaultStoreDict)
 for PcdCName, TokenSpaceGuid in PcdSet:
 PcdObj = PcdSet[(PcdCName, TokenSpaceGuid)]
@@ -2330,7 +2330,7 @@ class DscBuildData(PlatformBuildClassObject):
 if defaultstorename not in skuobj.DefaultStoreDict:
 skuobj.DefaultStoreDict[defaultstorename] = copy.deepcopy(skuobj.DefaultStoreDict[mindefaultstorename])
 skuobj.HiiDefaultValue = skuobj.DefaultStoreDict[mindefaultstorename]
-for skuname,skuid in SkuIds.items():
+for skuname, skuid in SkuIds.items():
 if skuname not in PcdObj.SkuInfoList:
 nextskuid = self.SkuIdMgr.GetNextSkuId(skuname)
 while nextskuid not in PcdObj.SkuInfoList:
@@ -2364,7 +2364,7 @@ class DscBuildData(PlatformBuildClassObject):
 AvailableSkuIdSet = copy.copy(self.SkuIds)
 DefaultStoresDefine = self._GetDefaultStores()
-for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, DefaultStore, Dummy4,Dummy5 in RecordList:
+for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, DefaultStore, Dummy4, Dummy5 in RecordList:
 SkuName = SkuName.upper()
 SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
 DefaultStore = DefaultStore.upper()
@@ -2377,14 +2377,14 @@ class DscBuildData(PlatformBuildClassObject):
 EdkLogger.error('build', PARAMETER_INVALID, 'DefaultStores %s is not defined in [DefaultStores] section' % DefaultStore,
 File=self.MetaFile, Line=Dummy5)
 if "." not in TokenSpaceGuid:
-PcdSet.add((PcdCName, TokenSpaceGuid, SkuName,DefaultStore, Dummy5))
+PcdSet.add((PcdCName, TokenSpaceGuid, SkuName, DefaultStore, Dummy5))
-PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid,DefaultStore] = Setting
+PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid, DefaultStore] = Setting
 # Remove redundant PCD candidates, per the ARCH and SKU
-for PcdCName, TokenSpaceGuid, SkuName,DefaultStore, Dummy4 in PcdSet:
+for PcdCName, TokenSpaceGuid, SkuName, DefaultStore, Dummy4 in PcdSet:
-Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid,DefaultStore]
+Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid, DefaultStore]
 if Setting is None:
 continue
 VariableName, VariableGuid, VariableOffset, DefaultValue, VarAttribute = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
@@ -2428,10 +2428,10 @@ class DscBuildData(PlatformBuildClassObject):
 Skuitem = pcdObject.SkuInfoList[SkuName]
 Skuitem.DefaultStoreDict.update({DefaultStore:DefaultValue})
 else:
-SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName][0], VariableName, VariableGuid, VariableOffset, DefaultValue, VariableAttribute=VarAttribute,DefaultStore={DefaultStore:DefaultValue})
+SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName][0], VariableName, VariableGuid, VariableOffset, DefaultValue, VariableAttribute=VarAttribute, DefaultStore={DefaultStore:DefaultValue})
 pcdObject.SkuInfoList[SkuName] = SkuInfo
 else:
-SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName][0], VariableName, VariableGuid, VariableOffset, DefaultValue, VariableAttribute=VarAttribute,DefaultStore={DefaultStore:DefaultValue})
+SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName][0], VariableName, VariableGuid, VariableOffset, DefaultValue, VariableAttribute=VarAttribute, DefaultStore={DefaultStore:DefaultValue})
 Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
 PcdCName,
 TokenSpaceGuid,
@@ -2462,7 +2462,7 @@ class DscBuildData(PlatformBuildClassObject):
 pcd.DefaultValue = pcdDecObject.DefaultValue
 if TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON not in pcd.SkuInfoList:
 valuefromDec = pcdDecObject.DefaultValue
-SkuInfo = SkuInfoClass(TAB_DEFAULT, '0', SkuInfoObj.VariableName, SkuInfoObj.VariableGuid, SkuInfoObj.VariableOffset, valuefromDec,VariableAttribute=SkuInfoObj.VariableAttribute,DefaultStore={DefaultStore:valuefromDec})
+SkuInfo = SkuInfoClass(TAB_DEFAULT, '0', SkuInfoObj.VariableName, SkuInfoObj.VariableGuid, SkuInfoObj.VariableOffset, valuefromDec, VariableAttribute=SkuInfoObj.VariableAttribute, DefaultStore={DefaultStore:valuefromDec})
 pcd.SkuInfoList[TAB_DEFAULT] = SkuInfo
 elif TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
 pcd.SkuInfoList[TAB_DEFAULT] = pcd.SkuInfoList[TAB_COMMON]
@@ -2490,7 +2490,7 @@ class DscBuildData(PlatformBuildClassObject):
 invalidpcd = ",".join(invalidhii)
 EdkLogger.error('build', PCD_VARIABLE_INFO_ERROR, Message='The same HII PCD must map to the same EFI variable for all SKUs', File=self.MetaFile, ExtraData=invalidpcd)
-map(self.FilterSkuSettings,Pcds.values())
+map(self.FilterSkuSettings, Pcds.values())
 return Pcds
@@ -2499,11 +2499,11 @@ class DscBuildData(PlatformBuildClassObject):
 invalidhii = []
 for pcdname in Pcds:
 pcd = Pcds[pcdname]
-varnameset = set(sku.VariableName for (skuid,sku) in pcd.SkuInfoList.items())
+varnameset = set(sku.VariableName for (skuid, sku) in pcd.SkuInfoList.items())
 if len(varnameset) > 1:
-invalidhii.append(".".join((pcdname[1],pcdname[0])))
+invalidhii.append(".".join((pcdname[1], pcdname[0])))
 if len(invalidhii):
-return False,invalidhii
+return False, invalidhii
 else:
 return True, []
 ## Retrieve dynamic VPD PCD settings
@@ -2527,7 +2527,7 @@ class DscBuildData(PlatformBuildClassObject):
 RecordList = self._RawData[Type, self._Arch]
 AvailableSkuIdSet = copy.copy(self.SkuIds)
-for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4,Dummy5 in RecordList:
+for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4, Dummy5 in RecordList:
 SkuName = SkuName.upper()
 SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
 if SkuName not in AvailableSkuIdSet:
@@ -2595,7 +2595,7 @@ class DscBuildData(PlatformBuildClassObject):
 del pcd.SkuInfoList[TAB_COMMON]
-map(self.FilterSkuSettings,Pcds.values())
+map(self.FilterSkuSettings, Pcds.values())
 return Pcds
 ## Add external modules
@@ -2660,7 +2660,7 @@ class DscBuildData(PlatformBuildClassObject):
 continue
 ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
 PkgSet.update(ModuleData.Packages)
-self._DecPcds, self._GuidDict = GetDeclaredPcd(self, self._Bdb, self._Arch, self._Target, self._Toolchain,PkgSet)
+self._DecPcds, self._GuidDict = GetDeclaredPcd(self, self._Bdb, self._Arch, self._Target, self._Toolchain, PkgSet)
 return self._DecPcds
 _Macros = property(_GetMacros)
 Arch = property(_GetArch, _SetArch)
@@ -302,7 +302,7 @@ class MetaFileParser(object):
 for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
 if Item == '':
 continue
-ItemList = GetSplitValueList(Item, TAB_SPLIT,3)
+ItemList = GetSplitValueList(Item, TAB_SPLIT, 3)
 # different section should not mix in one section
 if self._SectionName != '' and self._SectionName != ItemList[0].upper():
 EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section",
@@ -420,7 +420,7 @@ class MetaFileParser(object):
 ## Construct section Macro dict
 def _ConstructSectionMacroDict(self, Name, Value):
-ScopeKey = [(Scope[0], Scope[1],Scope[2]) for Scope in self._Scope]
+ScopeKey = [(Scope[0], Scope[1], Scope[2]) for Scope in self._Scope]
 ScopeKey = tuple(ScopeKey)
 #
 # DecParser SectionType is a list, will contain more than one item only in Pcd Section
@@ -451,15 +451,15 @@ class MetaFileParser(object):
 continue
 for ActiveScope in self._Scope:
-Scope0, Scope1 ,Scope2= ActiveScope[0], ActiveScope[1],ActiveScope[2]
+Scope0, Scope1, Scope2= ActiveScope[0], ActiveScope[1], ActiveScope[2]
-if(Scope0, Scope1,Scope2) not in Scope:
+if(Scope0, Scope1, Scope2) not in Scope:
 break
 else:
 SpeSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
 for ActiveScope in self._Scope:
-Scope0, Scope1,Scope2 = ActiveScope[0], ActiveScope[1],ActiveScope[2]
+Scope0, Scope1, Scope2 = ActiveScope[0], ActiveScope[1], ActiveScope[2]
-if(Scope0, Scope1,Scope2) not in Scope and (Scope0, TAB_COMMON, TAB_COMMON) not in Scope and (TAB_COMMON, Scope1, TAB_COMMON) not in Scope:
+if(Scope0, Scope1, Scope2) not in Scope and (Scope0, TAB_COMMON, TAB_COMMON) not in Scope and (TAB_COMMON, Scope1, TAB_COMMON) not in Scope:
 break
 else:
 ComSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
@@ -636,7 +636,7 @@ class InfParser(MetaFileParser):
 # Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1,
 # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
 #
-for Arch, Platform,_ in self._Scope:
+for Arch, Platform, _ in self._Scope:
 LastItem = self._Store(self._SectionType,
 self._ValueList[0],
 self._ValueList[1],
@@ -947,7 +947,7 @@ class DscParser(MetaFileParser):
 self._DirectiveParser()
 continue
 if Line[0] == TAB_OPTION_START and not self._InSubsection:
-EdkLogger.error("Parser", FILE_READ_FAILURE, "Missing the '{' before %s in Line %s" % (Line, Index+1),ExtraData=self.MetaFile)
+EdkLogger.error("Parser", FILE_READ_FAILURE, "Missing the '{' before %s in Line %s" % (Line, Index+1), ExtraData=self.MetaFile)
 if self._InSubsection:
 SectionType = self._SubsectionType
@@ -1104,7 +1104,7 @@ class DscParser(MetaFileParser):
 @ParseMacro
 def _SkuIdParser(self):
 TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
-if len(TokenList) not in (2,3):
+if len(TokenList) not in (2, 3):
 EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '<Number>|<UiName>[|<UiName>]'",
 ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
 self._ValueList[0:len(TokenList)] = TokenList
@@ -1164,7 +1164,7 @@ class DscParser(MetaFileParser):
 # Validate the datum type of Dynamic Defaul PCD and DynamicEx Default PCD
 ValueList = GetSplitValueList(self._ValueList[2])
-if len(ValueList) > 1 and ValueList[1] in [TAB_UINT8 , TAB_UINT16, TAB_UINT32 , TAB_UINT64] \
+if len(ValueList) > 1 and ValueList[1] in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64] \
 and self._ItemType in [MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT]:
 EdkLogger.error('Parser', FORMAT_INVALID, "The datum type '%s' of PCD is wrong" % ValueList[1],
 ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
@@ -1172,7 +1172,7 @@ class DscParser(MetaFileParser):
 # Validate the VariableName of DynamicHii and DynamicExHii for PCD Entry must not be an empty string
 if self._ItemType in [MODEL_PCD_DYNAMIC_HII, MODEL_PCD_DYNAMIC_EX_HII]:
 DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
-if len(DscPcdValueList[0].replace('L','').replace('"','').strip()) == 0:
+if len(DscPcdValueList[0].replace('L', '').replace('"', '').strip()) == 0:
 EdkLogger.error('Parser', FORMAT_INVALID, "The VariableName field in the HII format PCD entry must not be an empty string",
 ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
@@ -1309,7 +1309,7 @@ class DscParser(MetaFileParser):
 self._ContentIndex = 0
 self._InSubsection = False
 while self._ContentIndex < len(self._Content) :
-Id, self._ItemType, V1, V2, V3, S1, S2, S3,Owner, self._From, \
+Id, self._ItemType, V1, V2, V3, S1, S2, S3, Owner, self._From, \
 LineStart, ColStart, LineEnd, ColEnd, Enabled = self._Content[self._ContentIndex]
 if self._From < 0:
@@ -1327,8 +1327,8 @@ class DscParser(MetaFileParser):
 break
 Record = self._Content[self._ContentIndex]
 if LineStart == Record[10] and LineEnd == Record[12]:
-if [Record[5], Record[6],Record[7]] not in self._Scope:
+if [Record[5], Record[6], Record[7]] not in self._Scope:
-self._Scope.append([Record[5], Record[6],Record[7]])
+self._Scope.append([Record[5], Record[6], Record[7]])
 self._ContentIndex += 1
 else:
 break
@@ -1421,7 +1421,7 @@ class DscParser(MetaFileParser):
 MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_DEFAULT, MODEL_PCD_DYNAMIC_EX_HII,
 MODEL_PCD_DYNAMIC_EX_VPD):
 Records = self._RawTable.Query(PcdType, BelongsToItem= -1.0)
-for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, Dummy4,ID, Line in Records:
+for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, Dummy4, ID, Line in Records:
 Name = TokenSpaceGuid + '.' + PcdName
 if Name not in GlobalData.gPlatformOtherPcds:
 PcdLine = Line
@@ -1800,7 +1800,7 @@ class DecParser(MetaFileParser):
 if self._DefinesCount > 1:
 EdkLogger.error('Parser', FORMAT_INVALID, 'Multiple [Defines] section is exist.', self.MetaFile )
 if self._DefinesCount == 0:
-EdkLogger.error('Parser', FORMAT_INVALID, 'No [Defines] section exist.',self.MetaFile)
+EdkLogger.error('Parser', FORMAT_INVALID, 'No [Defines] section exist.', self.MetaFile)
 self._Done()
@@ -1944,7 +1944,7 @@ class DecParser(MetaFileParser):
 self._CurrentStructurePcdName = ""
 else:
 if self._CurrentStructurePcdName != TAB_SPLIT.join(PcdNames[:2]):
-EdkLogger.error('Parser', FORMAT_INVALID, "Pcd Name does not match: %s and %s " % (self._CurrentStructurePcdName , TAB_SPLIT.join(PcdNames[:2])),
+EdkLogger.error('Parser', FORMAT_INVALID, "Pcd Name does not match: %s and %s " % (self._CurrentStructurePcdName, TAB_SPLIT.join(PcdNames[:2])),
 File=self.MetaFile, Line=self._LineIndex + 1)
 self._ValueList[1] = TAB_SPLIT.join(PcdNames[2:])
 self._ValueList[2] = PcdTockens[1]
@@ -258,8 +258,8 @@ class PackageTable(MetaFileTable):
 ValidType = "@ValidList"
 if oricomment.startswith("@Expression"):
 ValidType = "@Expression"
-EdkLogger.error('Parser', FORMAT_INVALID, "The syntax for %s of PCD %s.%s is incorrect" % (ValidType,TokenSpaceGuid, PcdCName),
+EdkLogger.error('Parser', FORMAT_INVALID, "The syntax for %s of PCD %s.%s is incorrect" % (ValidType, TokenSpaceGuid, PcdCName),
-ExtraData=oricomment,File=self.MetaFile, Line=LineNum)
+ExtraData=oricomment, File=self.MetaFile, Line=LineNum)
 return set(), set(), set()
 return set(validateranges), set(validlists), set(expressions)
 ## Python class representation of table storing platform data
@@ -308,7 +308,7 @@ class PlatformTable(MetaFileTable):
 #
 def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON, Scope3=TAB_DEFAULT_STORES_DEFAULT,BelongsToItem=-1,
 FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):
-(Value1, Value2, Value3, Scope1, Scope2,Scope3) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2,Scope3))
+(Value1, Value2, Value3, Scope1, Scope2, Scope3) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2, Scope3))
 return Table.Insert(
 self,
 Model,
@@ -53,7 +53,7 @@ def GetPackageList(Platform, BuildDatabase, Arch, Target, Toolchain):
 # @retval: A dictionary contains instances of PcdClassObject with key (PcdCName, TokenSpaceGuid)
 # @retval: A dictionary contains real GUIDs of TokenSpaceGuid
 #
-def GetDeclaredPcd(Platform, BuildDatabase, Arch, Target, Toolchain,additionalPkgs):
+def GetDeclaredPcd(Platform, BuildDatabase, Arch, Target, Toolchain, additionalPkgs):
 PkgList = GetPackageList(Platform, BuildDatabase, Arch, Target, Toolchain)
 PkgList = set(PkgList)
 PkgList |= additionalPkgs
@@ -1190,7 +1190,7 @@ class PcdReport(object):
 FileWrite(File, Array)
 else:
 if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
-if Value.startswith(('0x','0X')):
+if Value.startswith(('0x', '0X')):
 Value = '{} ({:d})'.format(Value, int(Value, 0))
 else:
 Value = "0x{:X} ({})".format(int(Value, 0), Value)
@@ -1300,9 +1300,9 @@ class PcdReport(object):
 else:
 if IsByteArray:
 if self.SkuSingle:
-FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, ' ' , TypeName, '(' + Pcd.DatumType + ')', "{"))
+FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', "{"))
 else:
-FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ' , TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', "{"))
+FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', "{"))
 for Array in ArrayList:
 FileWrite(File, Array)
 else:
@@ -1312,9 +1312,9 @@ class PcdReport(object):
 else:
 Value = "0x{:X} ({})".format(int(Value, 0), Value)
 if self.SkuSingle:
-FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, ' ' , TypeName, '(' + Pcd.DatumType + ')', Value))
+FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', Value))
 else:
-FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ' , TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', Value))
+FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', Value))
 if TypeName in ('DYNVPD', 'DEXVPD'):
 FileWrite(File, '%*s' % (self.MaxLen + 4, SkuInfo.VpdOffset))
 if IsStructure:
@@ -54,7 +54,7 @@ import Common.EdkLogger
 import Common.GlobalData as GlobalData
 from GenFds.GenFds import GenFds
-from collections import OrderedDict,defaultdict
+from collections import OrderedDict, defaultdict
 # Version and Copyright
 VersionNumber = "0.60" + ' ' + gBUILD_VERSION
@@ -526,7 +526,7 @@ class BuildTask:
 BuildTask._Thread.acquire(True)
 # start a new build thread
-Bo,Bt = BuildTask._ReadyQueue.popitem()
+Bo, Bt = BuildTask._ReadyQueue.popitem()
 # move into running queue
 BuildTask._RunningQueueLock.acquire()
@@ -840,7 +840,7 @@ class Build():
 self.HashSkipModules = []
 self.Db_Flag = False
 self.LaunchPrebuildFlag = False
-self.PlatformBuildPath = os.path.join(GlobalData.gConfDirectory,'.cache', '.PlatformBuild')
+self.PlatformBuildPath = os.path.join(GlobalData.gConfDirectory, '.cache', '.PlatformBuild')
 if BuildOptions.CommandLength:
 GlobalData.gCommandMaxLength = BuildOptions.CommandLength
@@ -1133,7 +1133,7 @@ class Build():
 # and preserve them for the rest of the main build step, because the child process environment will
 # evaporate as soon as it exits, we cannot get it in build step.
 #
-PrebuildEnvFile = os.path.join(GlobalData.gConfDirectory,'.cache','.PrebuildEnv')
+PrebuildEnvFile = os.path.join(GlobalData.gConfDirectory, '.cache', '.PrebuildEnv')
 if os.path.isfile(PrebuildEnvFile):
 os.remove(PrebuildEnvFile)
 if os.path.isfile(self.PlatformBuildPath):
@@ -1173,7 +1173,7 @@ class Build():
 f = open(PrebuildEnvFile)
 envs = f.readlines()
 f.close()
-envs = itertools.imap(lambda l: l.split('=',1), envs)
+envs = itertools.imap(lambda l: l.split('=', 1), envs)
 envs = itertools.ifilter(lambda l: len(l) == 2, envs)
 envs = itertools.imap(lambda l: [i.strip() for i in l], envs)
 os.environ.update(dict(envs))
@@ -2358,7 +2358,7 @@ def MyOptionParser():
 Parser.add_option("-D", "--define", action="append", type="string", dest="Macros", help="Macro: \"Name [= Value]\".")
 Parser.add_option("-y", "--report-file", action="store", dest="ReportFile", help="Create/overwrite the report to the specified filename.")
-Parser.add_option("-Y", "--report-type", action="append", type="choice", choices=['PCD','LIBRARY','FLASH','DEPEX','BUILD_FLAGS','FIXED_ADDRESS','HASH','EXECUTION_ORDER'], dest="ReportType", default=[],
+Parser.add_option("-Y", "--report-type", action="append", type="choice", choices=['PCD', 'LIBRARY', 'FLASH', 'DEPEX', 'BUILD_FLAGS', 'FIXED_ADDRESS', 'HASH', 'EXECUTION_ORDER'], dest="ReportType", default=[],
 help="Flags that control the type of build report to generate. Must be one of: [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS, HASH, EXECUTION_ORDER]. "\
 "To specify more than one flag, repeat this option on the command line and the default flag set is [PCD, LIBRARY, FLASH, DEPEX, HASH, BUILD_FLAGS, FIXED_ADDRESS]")
 Parser.add_option("-F", "--flag", action="store", type="string", dest="Flag",
@@ -160,7 +160,7 @@ class BaseToolsTest(unittest.TestCase):
 if minlen is None: minlen = 1024
 if maxlen is None: maxlen = minlen
 return ''.join(
-[chr(random.randint(0,255))
+[chr(random.randint(0, 255))
 for x in xrange(random.randint(minlen, maxlen))
 ])
@@ -186,7 +186,7 @@ class Config:
 return path
 def MakeDirs(self):
-for path in (self.src_dir, self.build_dir,self.prefix, self.symlinks):
+for path in (self.src_dir, self.build_dir, self.prefix, self.symlinks):
 if not os.path.exists(path):
 os.makedirs(path)
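
Every hunk above applies the same mechanical rule: any stray space before a separating comma is dropped and exactly one space follows it, with no change to runtime behaviour. A minimal, hypothetical sketch of that rule (the variable names below are placeholders invented for illustration, not taken from any of the patched files):

# Hypothetical illustration of the comma-spacing rule applied throughout the hunks above.
TokenSpaceGuid, PcdCName, SkuName = 'gTokenSpaceGuid', 'PcdExample', 'DEFAULT'
before = (TokenSpaceGuid, PcdCName ,SkuName)   # old spelling: space before / missing space after a comma
after = (TokenSpaceGuid, PcdCName, SkuName)    # new spelling: ", " between items
assert before == after                         # whitespace-only change; the tuples are identical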