Sync BaseTool trunk (version r2640) into EDKII BaseTools.

Signed-off-by: Gao, Liming <liming.gao@intel.com>
Reviewed-by: Liu, Jiang A <jiang.a.liu@intel.com>


git-svn-id: https://svn.code.sf.net/p/edk2/code/trunk/edk2@15089 6f19259b-4bc3-4df7-8a09-765794883524
Author:     Gao, Liming
Date:       2014-01-10 05:25:50 +00:00
Committed:  lgao4
Parent:     8d9e16963e
Commit:     2bc3256ca6
62 changed files with 906 additions and 265 deletions

@ -1,7 +1,7 @@
## @file
# Generate AutoGen.h, AutoGen.c and *.depex files
#
# Copyright (c) 2007 - 2013, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@ -334,6 +334,7 @@ class WorkspaceAutoGen(AutoGen):
# Explicitly collect platform's dynamic PCDs
#
Pa.CollectPlatformDynamicPcds()
Pa.CollectFixedAtBuildPcds()
self.AutoGenObjectList.append(Pa)
#
@ -785,6 +786,7 @@ class PlatformAutoGen(AutoGen):
self._PcdTokenNumber = None # (TokenCName, TokenSpaceGuidCName) : GeneratedTokenNumber
self._DynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
self._NonDynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
self._NonDynamicPcdDict = {}
self._ToolDefinitions = None
self._ToolDefFile = None # toolcode : tool path
@ -851,6 +853,32 @@ class PlatformAutoGen(AutoGen):
(self.MetaFile, self.Arch))
self.IsMakeFileCreated = True
## Deal with Shared FixedAtBuild Pcds
#
def CollectFixedAtBuildPcds(self):
for LibAuto in self.LibraryAutoGenList:
FixedAtBuildPcds = {}
ShareFixedAtBuildPcdsSameValue = {}
for Module in LibAuto._ReferenceModules:
for Pcd in Module.FixedAtBuildPcds + LibAuto.FixedAtBuildPcds:
key = ".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName))
if key not in FixedAtBuildPcds:
ShareFixedAtBuildPcdsSameValue[key] = True
FixedAtBuildPcds[key] = Pcd.DefaultValue
else:
if FixedAtBuildPcds[key] != Pcd.DefaultValue:
ShareFixedAtBuildPcdsSameValue[key] = False
for Pcd in LibAuto.FixedAtBuildPcds:
key = ".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName))
if (Pcd.TokenCName,Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
continue
else:
DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName,Pcd.TokenSpaceGuidCName)]
if DscPcd.Type != "FixedAtBuild":
continue
if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
LibAuto.ConstPcd[key] = Pcd.DefaultValue
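As a side note, a minimal sketch of the sharing rule CollectFixedAtBuildPcds() implements, using made-up PCD names and values (the dict names below are illustrative, not the real attributes): a FixedAtBuild PCD becomes a library ConstPcd entry only when the library and every module that references it agree on the default value.
# Illustration only, not part of the patch.
seen_value = {}          # "TokenSpaceGuid.TokenCName" -> first DefaultValue observed
same_value = {}          # "TokenSpaceGuid.TokenCName" -> True while all values agree
observations = [
    ("gExampleTokenSpaceGuid.PcdSharedMask", "0x3F"),   # from referencing module A
    ("gExampleTokenSpaceGuid.PcdSharedMask", "0x3F"),   # from referencing module B
    ("gExampleTokenSpaceGuid.PcdSharedMask", "0x3F"),   # from the library itself
]
for key, value in observations:
    if key not in seen_value:
        seen_value[key] = value
        same_value[key] = True
    elif seen_value[key] != value:
        same_value[key] = False
const_pcd = dict((k, seen_value[k]) for k in same_value if same_value[k])
assert const_pcd == {"gExampleTokenSpaceGuid.PcdSharedMask": "0x3F"}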
## Collect dynamic PCDs
#
# Gather dynamic PCDs list from each module and their settings from platform
@ -1296,6 +1324,13 @@ class PlatformAutoGen(AutoGen):
self._PackageList = list(self._PackageList)
return self._PackageList
def _GetNonDynamicPcdDict(self):
if self._NonDynamicPcdDict:
return self._NonDynamicPcdDict
for Pcd in self.NonDynamicPcdList:
self._NonDynamicPcdDict[(Pcd.TokenCName,Pcd.TokenSpaceGuidCName)] = Pcd
return self._NonDynamicPcdDict
## Get list of non-dynamic PCDs
def _GetNonDynamicPcdList(self):
if self._NonDynamicPcdList == None:
@ -1373,6 +1408,8 @@ class PlatformAutoGen(AutoGen):
for La in Ma.LibraryAutoGenList:
if La not in self._LibraryAutoGenList:
self._LibraryAutoGenList.append(La)
if Ma not in La._ReferenceModules:
La._ReferenceModules.append(Ma)
## Summarize ModuleAutoGen objects of all modules to be built for this platform
def _GetModuleAutoGenList(self):
@ -1911,6 +1948,7 @@ class PlatformAutoGen(AutoGen):
PcdTokenNumber = property(_GetPcdTokenNumbers) # (TokenCName, TokenSpaceGuidCName) : GeneratedTokenNumber
DynamicPcdList = property(_GetDynamicPcdList) # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
NonDynamicPcdList = property(_GetNonDynamicPcdList) # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
NonDynamicPcdDict = property(_GetNonDynamicPcdDict)
PackageList = property(_GetPackageList)
ToolDefinition = property(_GetToolDefinition) # toolcode : tool path
@ -2027,12 +2065,35 @@ class ModuleAutoGen(AutoGen):
self._FinalBuildTargetList = None
self._FileTypes = None
self._BuildRules = None
## The Modules that reference this Library
#  Only a Library instance has this attribute
self._ReferenceModules = []
## Store the FixedAtBuild Pcds
#
self._FixedAtBuildPcds = []
self.ConstPcd = {}
return True
def __repr__(self):
return "%s [%s]" % (self.MetaFile, self.Arch)
# Get FixedAtBuild Pcds of this Module
def _GetFixedAtBuildPcds(self):
if self._FixedAtBuildPcds:
return self._FixedAtBuildPcds
for Pcd in self.ModulePcdList:
if self.IsLibrary:
if not (Pcd.Pending == False and Pcd.Type == "FixedAtBuild"):
continue
elif Pcd.Type != "FixedAtBuild":
continue
if Pcd not in self._FixedAtBuildPcds:
self._FixedAtBuildPcds.append(Pcd)
return self._FixedAtBuildPcds
# Macros could be used in build_rule.txt (also Makefile)
def _GetMacros(self):
if self._Macro == None:
@ -3102,6 +3163,8 @@ class ModuleAutoGen(AutoGen):
BuildOption = property(_GetModuleBuildOption)
BuildOptionIncPathList = property(_GetBuildOptionIncPathList)
BuildCommand = property(_GetBuildCommand)
FixedAtBuildPcds = property(_GetFixedAtBuildPcds)
# This acts like the main() function for the script, unless it is 'import'ed into another script.
if __name__ == '__main__':

@ -1069,10 +1069,15 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
AutoGenH.Append('#define %s %s_gPcd_BinaryPatch_%s\n' %(GetModeName, Type, TokenCName))
AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName))
if PcdItemType == TAB_PCDS_FIXED_AT_BUILD or PcdItemType == TAB_PCDS_FEATURE_FLAG:
key = ".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName))
AutoGenH.Append('extern const %s _gPcd_FixedAtBuild_%s%s;\n' %(DatumType, TokenCName, Array))
AutoGenH.Append('#define %s %s_gPcd_FixedAtBuild_%s\n' %(GetModeName, Type, TokenCName))
AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName)
if PcdItemType == TAB_PCDS_FIXED_AT_BUILD and key in Info.ConstPcd:
AutoGenH.Append('#define _PCD_VALUE_%s %s\n' %(TokenCName, Pcd.DefaultValue))
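For orientation, a sketch of the AutoGen.h text the Append calls above emit for a hypothetical shared FixedAtBuild PCD; the exact macro names depend on GetModeName, SetModeName and Type computed earlier in the function, so the lines below are indicative only.
# Hypothetical PCD: UINT32 gExampleTokenSpaceGuid.PcdSharedMask, DefaultValue 0x3F, present in Info.ConstPcd.
# The generated header would read roughly:
#   extern const UINT32 _gPcd_FixedAtBuild_PcdSharedMask;
#   #define _PCD_GET_MODE_32_PcdSharedMask  _gPcd_FixedAtBuild_PcdSharedMask
#   //#define _PCD_SET_MODE_32_PcdSharedMask ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD
#   #define _PCD_VALUE_PcdSharedMask  0x3F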
## Create code for library constructor

@ -528,13 +528,20 @@ class DbSizeTableItemList (DbItemList):
if RawDataList is None:
RawDataList = []
DbItemList.__init__(self, ItemSize, DataList, RawDataList)
def GetListSize(self):
length = 0
for Data in self.RawDataList:
length += (1 + len(Data[1]))
return length * self.ItemSize
def PackData(self):
PackStr = "=HH"
PackStr = "=H"
Buffer = ''
for Data in self.RawDataList:
Buffer += pack(PackStr,
GetIntegerValue(Data[0]),
GetIntegerValue(Data[1]))
GetIntegerValue(Data[0]))
for subData in Data[1]:
Buffer += pack(PackStr,
GetIntegerValue(subData))
return Buffer
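A worked example of the new layout (made-up lengths): GetListSize() now counts one UINT16 maximum length plus one UINT16 per current-length entry for each record, and PackData() emits them one "=H" value at a time instead of a fixed "=HH" pair.
from struct import pack

# Hypothetical RawDataList entries: (MaxLength, [CurrentLength, ...]) per VOID* PCD.
raw = [(8, [6]), (16, [4, 10])]            # the second PCD carries two SKU-specific sizes
item_size = 2                              # DbSizeTableItemList is now created with ItemSize=2
total = sum(1 + len(cur) for _max, cur in raw) * item_size
assert total == 10                         # (1+1) + (1+2) UINT16 values -> 10 bytes
buf = b''
for maximum, currents in raw:
    buf += pack("=H", maximum)
    for cur in currents:
        buf += pack("=H", cur)
assert len(buf) == total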
## DbStringItemList
@ -732,7 +739,7 @@ def BuildExDataBase(Dict):
DbPcdNameOffsetTable = DbItemList(4,RawDataList = PcdNameOffsetTable)
SizeTableValue = zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH'])
DbSizeTableValue = DbSizeTableItemList(4, RawDataList = SizeTableValue)
DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
InitValueUint16 = Dict['INIT_DB_VALUE_UINT16']
DbInitValueUint16 = DbComItemList(2, RawDataList = InitValueUint16)
VardefValueUint16 = Dict['VARDEF_DB_VALUE_UINT16']
@ -812,7 +819,7 @@ def BuildExDataBase(Dict):
SkuIndexIndexTable = [(0) for i in xrange(len(Dict['SKU_INDEX_VALUE']))]
SkuIndexIndexTable[0] = 0 #Dict['SKU_INDEX_VALUE'][0][0]
for i in range(1,len(Dict['SKU_INDEX_VALUE'])):
SkuIndexIndexTable[i] = SkuIndexIndexTable[i-1]+Dict['SKU_INDEX_VALUE'][i-1][0]
SkuIndexIndexTable[i] = SkuIndexIndexTable[i-1]+Dict['SKU_INDEX_VALUE'][i-1][0] + 1
for (LocalTokenNumberTableIndex, (Offset, Table)) in enumerate(LocalTokenNumberTable):
DbIndex = 0
DbOffset = FixedHeaderLen
@ -829,7 +836,7 @@ def BuildExDataBase(Dict):
LocalTokenNumberTable[LocalTokenNumberTableIndex] = DbOffset|int(TokenTypeValue)
# if PCD_TYPE_SKU_ENABLED, then we need to fix up the SkuTable
SkuIndexTabalOffset = SkuIdTableOffset + Dict['SKUID_VALUE'][0]
SkuIndexTabalOffset = SkuIdTableOffset + Dict['SKUID_VALUE'][0] + 1
if (TokenTypeValue & (0x2 << 28)):
SkuTable[SkuHeaderIndex] = (DbOffset|int(TokenTypeValue & ~(0x2<<28)), SkuIndexTabalOffset + SkuIndexIndexTable[SkuHeaderIndex])
LocalTokenNumberTable[LocalTokenNumberTableIndex] = (SkuTableOffset + SkuHeaderIndex * 8) | int(TokenTypeValue)
@ -842,6 +849,7 @@ def BuildExDataBase(Dict):
# resolve variable table offset
for VariableEntries in VariableTable:
skuindex = 0
for VariableEntryPerSku in VariableEntries:
(VariableHeadGuidIndex, VariableHeadStringIndex, SKUVariableOffset, VariableOffset, VariableRefTable) = VariableEntryPerSku[:]
DbIndex = 0
@ -853,7 +861,9 @@ def BuildExDataBase(Dict):
DbOffset += DbItemTotal[DbIndex].GetListSize()
else:
assert(False)
if isinstance(VariableRefTable[0],list):
DbOffset += skuindex * 4
skuindex += 1
if DbIndex >= InitTableNum:
assert(False)
@ -995,10 +1005,6 @@ def CreatePcdDatabaseCode (Info, AutoGenC, AutoGenH):
DbFile.write(PcdDbBuffer)
Changed = SaveFileOnChange(DbFileName, DbFile.getvalue(), True)
def CArrayToArray(carray):
return "{%s, 0x00}" % ", ".join(["0x%02x" % ord(C) for C in carray])
## Create PCD database in DXE or PEI phase
#
# @param Platform The platform object
@ -1094,6 +1100,8 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
Dict['PCD_TOKENSPACE_MAP'] = []
Dict['PCD_NAME_OFFSET'] = []
PCD_STRING_INDEX_MAP = {}
StringTableIndex = 0
StringTableSize = 0
NumberOfLocalTokens = 0
@ -1105,6 +1113,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
GuidList = []
i = 0
for Pcd in Platform.DynamicPcdList:
VoidStarTypeCurrSize = []
i += 1
CName = Pcd.TokenCName
TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
@ -1156,6 +1165,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
SkuIndexTableTmp = []
SkuIndexTableTmp.append(0)
SkuIdIndex = 1
VariableHeadList = []
for SkuName in Pcd.SkuInfoList:
Sku = Pcd.SkuInfoList[SkuName]
SkuId = Sku.SkuId
@ -1171,27 +1181,36 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
if len(Sku.VariableName) > 0:
Pcd.TokenTypeList += ['PCD_TYPE_HII']
Pcd.InitString = 'INIT'
# store VariableName to stringTable and calculate the VariableHeadStringIndex
if Sku.VariableName.startswith('{'):
VariableNameStructure = CArrayToArray(Sku.VariableName)
else:
VariableNameStructure = StringToArray(Sku.VariableName)
if VariableNameStructure not in Dict['STRING_TABLE_VALUE']:
Dict['STRING_TABLE_CNAME'].append(CName)
Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)
if StringTableIndex == 0:
Dict['STRING_TABLE_INDEX'].append('')
else:
Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
Dict['STRING_TABLE_LENGTH'].append((len(Sku.VariableName) - 3 + 1) * 2 )
Dict['STRING_TABLE_VALUE'].append(VariableNameStructure)
StringTableIndex += 1
StringTableSize += (len(Sku.VariableName) - 3 + 1) * 2
VariableHeadStringIndex = 0
for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)):
VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index]
# Store all variable names of one HII PCD under different SKU to stringTable
# and calculate the VariableHeadStringIndex
if SkuIdIndex - 2 == 0:
for SkuName in Pcd.SkuInfoList:
SkuInfo = Pcd.SkuInfoList[SkuName]
if SkuInfo.SkuId == None or SkuInfo.SkuId == '':
continue
VariableNameStructure = StringToArray(SkuInfo.VariableName)
if VariableNameStructure not in Dict['STRING_TABLE_VALUE']:
Dict['STRING_TABLE_CNAME'].append(CName)
Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)
if StringTableIndex == 0:
Dict['STRING_TABLE_INDEX'].append('')
else:
Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
VarNameSize = len(VariableNameStructure.replace(',',' ').split())
Dict['STRING_TABLE_LENGTH'].append(VarNameSize )
Dict['STRING_TABLE_VALUE'].append(VariableNameStructure)
StringHeadOffsetList.append(str(StringTableSize) + 'U')
VarStringDbOffsetList = []
VarStringDbOffsetList.append(StringTableSize)
Dict['STRING_DB_VALUE'].append(VarStringDbOffsetList)
StringTableIndex += 1
StringTableSize += len(VariableNameStructure.replace(',',' ').split())
VariableHeadStringIndex = 0
for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)):
VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index]
VariableHeadList.append(VariableHeadStringIndex)
VariableHeadStringIndex = VariableHeadList[SkuIdIndex - 2]
# store VariableGuid to GuidTable and get the VariableHeadGuidIndex
VariableGuidStructure = Sku.VariableGuidValue
VariableGuid = GuidStructureStringToGuidValueName(VariableGuidStructure)
@ -1246,7 +1265,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
# the Pcd default value was filled before
VariableOffset = len(Dict['VARDEF_DB_VALUE_' + Pcd.DatumType]) - 1
VariableRefTable = Dict['VARDEF_DB_VALUE_' + Pcd.DatumType]
VariableDbValueList.append([VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset, VariableOffset, VariableRefTable])
VariableDbValueList.append([VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset, VariableOffset, VariableRefTable])
elif Sku.VpdOffset != '':
Pcd.TokenTypeList += ['PCD_TYPE_VPD']
@ -1256,11 +1275,8 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
# Also add the VOID* string of VPD PCD to SizeTable
if Pcd.DatumType == 'VOID*':
NumberOfSizeItems += 1
Dict['SIZE_TABLE_CNAME'].append(CName)
Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
# For VPD type of PCD, its current size is equal to its MAX size.
Dict['SIZE_TABLE_CURRENT_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
VoidStarTypeCurrSize = [str(Pcd.MaxDatumSize) + 'U']
continue
if Pcd.DatumType == 'VOID*':
@ -1278,29 +1294,36 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
else:
Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
if Sku.DefaultValue[0] == 'L':
Size = (len(Sku.DefaultValue) - 3 + 1) * 2
Dict['STRING_TABLE_VALUE'].append(StringToArray(Sku.DefaultValue))
DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
Size = len(DefaultValueBinStructure.replace(',',' ').split())
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
elif Sku.DefaultValue[0] == '"':
Size = len(Sku.DefaultValue) - 2 + 1
Dict['STRING_TABLE_VALUE'].append(StringToArray(Sku.DefaultValue))
DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
Size = len(Sku.DefaultValue) -2 + 1
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
elif Sku.DefaultValue[0] == '{':
Size = len(Sku.DefaultValue.replace(',',' ').split())
Dict['STRING_TABLE_VALUE'].append(Sku.DefaultValue)
DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
Size = len(Sku.DefaultValue.split(","))
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
StringHeadOffsetList.append(str(StringTableSize) + 'U')
StringDbOffsetList.append(StringTableSize)
Dict['SIZE_TABLE_CNAME'].append(CName)
Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
Dict['SIZE_TABLE_CURRENT_LENGTH'].append(str(Size) + 'U')
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
if Pcd.MaxDatumSize != '':
MaxDatumSize = int(Pcd.MaxDatumSize, 0)
if MaxDatumSize < Size:
MaxDatumSize = Size
Size = MaxDatumSize
Dict['STRING_TABLE_LENGTH'].append(Size)
EdkLogger.error("build", AUTOGEN_ERROR,
"The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
ExtraData="[%s]" % str(Platform))
else:
MaxDatumSize = Size
StringTabLen = MaxDatumSize
if StringTabLen % 2:
StringTabLen += 1
if Sku.VpdOffset == '':
VoidStarTypeCurrSize.append(str(Size) + 'U')
Dict['STRING_TABLE_LENGTH'].append(StringTabLen)
StringTableIndex += 1
StringTableSize += (Size)
StringTableSize += (StringTabLen)
else:
if "PCD_TYPE_HII" not in Pcd.TokenTypeList:
Pcd.TokenTypeList += ['PCD_TYPE_DATA']
@ -1326,8 +1349,14 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
DbValueList.append(Sku.DefaultValue)
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
if Pcd.DatumType == 'VOID*':
Dict['SIZE_TABLE_CNAME'].append(CName)
Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
Dict['SIZE_TABLE_CURRENT_LENGTH'].append(VoidStarTypeCurrSize)
SkuIndexTableTmp[0] = len(SkuIndexTableTmp)
SkuIndexTableTmp[0] = len(SkuIndexTableTmp) - 1
if len(Pcd.SkuInfoList) > 1:
Dict['SKU_INDEX_VALUE'].append(SkuIndexTableTmp)
@ -1352,6 +1381,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
Dict['STRING_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
Dict['STRING_HEAD_VALUE'].append(', '.join(StringHeadOffsetList))
Dict['STRING_DB_VALUE'].append(StringDbOffsetList)
PCD_STRING_INDEX_MAP[len(Dict['STRING_HEAD_CNAME_DECL']) -1 ] = len(Dict['STRING_DB_VALUE']) -1
if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
Dict[Pcd.InitString+'_CNAME_DECL_'+Pcd.DatumType].append(CName)
Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType].append(TokenSpaceGuid)
@ -1405,11 +1435,12 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
TokenSpaceGuidCNameArray = StringToArray('"' + TokenSpaceGuidCName + '"' )
if TokenSpaceGuidCNameArray not in Dict['PCD_TOKENSPACE']:
Dict['PCD_TOKENSPACE'].append(TokenSpaceGuidCNameArray)
Dict['PCD_TOKENSPACE_LENGTH'].append( len(TokenSpaceGuidCName) + 1 )
Dict['PCD_TOKENSPACE_LENGTH'].append( len(TokenSpaceGuidCNameArray.split(",")) )
Dict['PCD_TOKENSPACE_MAP'][GeneratedTokenNumber] = Dict['PCD_TOKENSPACE'].index(TokenSpaceGuidCNameArray)
Dict['PCD_CNAME'][GeneratedTokenNumber] = StringToArray('"' + CName + '"' )
CNameBinArray = StringToArray('"' + CName + '"' )
Dict['PCD_CNAME'][GeneratedTokenNumber] = CNameBinArray
Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CName) + 1
Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CNameBinArray.split(","))
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
@ -1427,6 +1458,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
if 'PCD_TYPE_STRING' in Pcd.TokenTypeList and 'PCD_TYPE_HII' not in Pcd.TokenTypeList:
# Find index by CName, TokenSpaceGuid
Offset = GetMatchedIndex(CName, Dict['STRING_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['STRING_HEAD_GUID_DECL'])
Offset = PCD_STRING_INDEX_MAP[Offset]
assert(Offset != -1)
Table = Dict['STRING_DB_VALUE']
if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
@ -1475,13 +1507,13 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
Dict['PCD_TOKENSPACE_OFFSET'].append(TokenSpaceIndex)
for index in range(len(Dict['PCD_TOKENSPACE'])):
StringTableSize += Dict['PCD_TOKENSPACE_LENGTH'][index]
StringTableIndex += 1
for index in range(len(Dict['PCD_CNAME'])):
Dict['PCD_CNAME_OFFSET'].append(StringTableSize)
Dict['PCD_NAME_OFFSET'].append(Dict['PCD_TOKENSPACE_OFFSET'][index])
Dict['PCD_NAME_OFFSET'].append(StringTableSize)
StringTableSize += Dict['PCD_CNAME_LENGTH'][index]
StringTableIndex += 1
if GuidList != []:
Dict['GUID_TABLE_EMPTY'] = 'FALSE'
Dict['GUID_TABLE_SIZE'] = str(len(GuidList)) + 'U'
@ -1501,7 +1533,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
if Dict['SIZE_TABLE_CNAME'] == []:
Dict['SIZE_TABLE_CNAME'].append('')
Dict['SIZE_TABLE_GUID'].append('')
Dict['SIZE_TABLE_CURRENT_LENGTH'].append('0U')
Dict['SIZE_TABLE_CURRENT_LENGTH'].append(['0U'])
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append('0U')
if NumberOfLocalTokens != 0:
@ -1524,7 +1556,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
if NumberOfSkuEnabledPcd != 0:
Dict['SKU_HEAD_SIZE'] = str(NumberOfSkuEnabledPcd) + 'U'
Dict['SKUID_VALUE'][0] = len(Dict['SKUID_VALUE'])
Dict['SKUID_VALUE'][0] = len(Dict['SKUID_VALUE']) - 1
AutoGenH.Append(gPcdDatabaseAutoGenH.Replace(Dict))
if NumberOfLocalTokens == 0:

@ -13,4 +13,4 @@
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
gBUILD_VERSION = "Build 2610"
gBUILD_VERSION = "Build 2640"

@ -486,6 +486,8 @@ PCDS_DYNAMICEX_DEFAULT = "PcdsDynamicExDefault"
PCDS_DYNAMICEX_VPD = "PcdsDynamicExVpd"
PCDS_DYNAMICEX_HII = "PcdsDynamicExHii"
SECTIONS_HAVE_ITEM_PCD = [PCDS_DYNAMIC_DEFAULT.upper(),PCDS_DYNAMIC_VPD.upper(),PCDS_DYNAMIC_HII.upper(), \
PCDS_DYNAMICEX_DEFAULT.upper(),PCDS_DYNAMICEX_VPD.upper(),PCDS_DYNAMICEX_HII.upper()]
# Section allowed to have items after arch
SECTIONS_HAVE_ITEM_AFTER_ARCH = [TAB_LIBRARY_CLASSES.upper(), TAB_DEPEX.upper(), TAB_USER_EXTENSIONS.upper(),
PCDS_DYNAMIC_DEFAULT.upper(),

@ -1238,9 +1238,16 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''):
Value = FieldList[0]
Size = ''
if len(FieldList) > 1:
Size = FieldList[1]
Type = FieldList[1]
# Fix the PCD type when no DataType input
if Type == 'VOID*':
DataType = 'VOID*'
else:
Size = FieldList[1]
if len(FieldList) > 2:
Size = FieldList[2]
if DataType == 'VOID*':
IsValid = (len(FieldList) <= 2)
IsValid = (len(FieldList) <= 3)
else:
IsValid = (len(FieldList) <= 1)
return [Value, '', Size], IsValid, 0
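A quick sketch of how the relaxed field handling above reads a few hypothetical DSC value strings (FieldList is the '|'-split remainder after the PCD name); validity rules for the other PCD types are outside this hunk.
# Hypothetical settings and the interpretation of the branch above:
#   L"abc"              -> Value = 'L"abc"', Size = ''          (single field)
#   L"abc"|VOID*        -> FieldList[1] names the datum type; DataType is forced to VOID*
#   L"abc"|VOID*|8      -> FieldList[2] supplies the size
#   L"abc"|8            -> FieldList[1] is taken as the size when it is not VOID*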
@ -1255,7 +1262,12 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''):
Size = FieldList[2]
else:
if Type == 'VOID*':
Size = str(len(Value))
if Value.startswith("L"):
Size = str((len(Value)- 3 + 1) * 2)
elif Value.startswith("{"):
Size = str(len(Value.split(",")))
else:
Size = str(len(Value) -2 + 1 )
if DataType == 'VOID*':
IsValid = (len(FieldList) <= 3)
else:

@ -401,16 +401,6 @@ def CleanString2(Line, CommentCharacter=DataType.TAB_COMMENT_SPLIT, AllowCppStyl
Comment = Line[Index:].strip()
Line = Line[0:Index].strip()
break
if Comment:
# Remove prefixed and trailing comment characters
Start = 0
End = len(Comment)
while Start < End and Comment.startswith(CommentCharacter, Start, End):
Start += 1
while End >= 0 and Comment.endswith(CommentCharacter, Start, End):
End -= 1
Comment = Comment[Start:End]
Comment = Comment.strip()
return Line, Comment
@ -811,11 +801,25 @@ def StringToArray(String):
return "{%s, 0x00, 0x00}" % ", ".join(["0x%02x, 0x00" % ord(C) for C in String[2:-1]])
elif String.startswith('"'):
if String == "\"\"":
return "{0x00}";
return "{0x00,0x00}"
else:
return "{%s, 0x00}" % ", ".join(["0x%02x" % ord(C) for C in String[1:-1]])
StringLen = len(String[1:-1])
if StringLen % 2:
return "{%s, 0x00}" % ", ".join(["0x%02x" % ord(C) for C in String[1:-1]])
else:
return "{%s, 0x00,0x00}" % ", ".join(["0x%02x" % ord(C) for C in String[1:-1]])
elif String.startswith('{'):
StringLen = len(String.split(","))
if StringLen % 2:
return "{%s, 0x00}" % ", ".join([ C for C in String[1:-1].split(',')])
else:
return "{%s}" % ", ".join([ C for C in String[1:-1].split(',')])
else:
return '{%s, 0}' % ', '.join(String.split())
if len(String.split()) % 2:
return '{%s, 0}' % ', '.join(String.split())
else:
return '{%s, 0,0}' % ', '.join(String.split())
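A few sanity examples of the even-size padding the branches above now apply (inputs are hypothetical; ASCII byte values shown for readability):
# StringToArray('"AB"')             -> "{0x41, 0x42, 0x00,0x00}"      # even payload, two NULs appended
# StringToArray('"ABC"')            -> "{0x41, 0x42, 0x43, 0x00}"     # odd payload, one NUL keeps the total even
# StringToArray('{0x01,0x02}')      -> "{0x01, 0x02}"                 # even item count, unchanged
# StringToArray('{0x01,0x02,0x03}') -> "{0x01, 0x02, 0x03, 0x00}"     # odd item count, padded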
def StringArrayLength(String):
if isinstance(String, unicode):

@ -97,6 +97,7 @@ MODEL_META_DATA_COMMENT = 5016
MODEL_META_DATA_GLOBAL_DEFINE = 5017
MODEL_META_DATA_SECTION_HEADER = 5100
MODEL_META_DATA_SUBSECTION_HEADER = 5200
MODEL_META_DATA_TAIL_COMMENT = 5300
MODEL_EXTERNAL_DEPENDENCY = 10000

@ -1,7 +1,7 @@
## @file
# generate capsule
#
# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007-2013, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@ -133,5 +133,25 @@ class CapsuleAnyFile (CapsuleData):
# @param self The object pointer
# @retval string Generated file name
#
def GenCapsuleSubItem(self):
return self.FileName
## Afile class for capsule data
#
#
class CapsuleAfile (CapsuleData):
## The constructor
#
# @param self The object pointer
#
def __init__(self) :
self.Ffs = None
self.FileName = None
## generate Afile capsule data
#
# @param self The object pointer
# @retval string Generated file name
#
def GenCapsuleSubItem(self):
return self.FileName

@ -1,7 +1,7 @@
## @file
# parse FDF file
#
# Copyright (c) 2007 - 2013, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@ -77,6 +77,7 @@ SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')
RegionSizePattern = re.compile("\s*(?P<base>(?:0x|0X)?[a-fA-F0-9]+)\s*\|\s*(?P<size>(?:0x|0X)?[a-fA-F0-9]+)\s*")
RegionSizeGuidPattern = re.compile("\s*(?P<base>\w+\.\w+)\s*\|\s*(?P<size>\w+\.\w+)\s*")
RegionOffsetPcdPattern = re.compile("\s*(?P<base>\w+\.\w+)\s*$")
ShortcutPcdPattern = re.compile("\s*\w+\s*=\s*(?P<value>(?:0x|0X)?[a-fA-F0-9]+)\s*\|\s*(?P<name>\w+\.\w+)\s*")
IncludeFileList = []
@ -1732,8 +1733,7 @@ class FdfParser:
try:
return long(
ValueExpression(Expr,
dict(['%s.%s' % (Pcd[1], Pcd[0]), Val]
for Pcd, Val in self.Profile.PcdDict.iteritems())
self.__CollectMacroPcd()
)(True),0)
except Exception:
self.SetFileBufferPos(StartPos)
@ -1769,16 +1769,26 @@ class FdfParser:
return True
if not self.__Token in ("SET", "FV", "FILE", "DATA", "CAPSULE"):
#
# If the next token is a word that is not a valid FV type, it might be part of [PcdOffset[|PcdSize]],
# or it might be the next region's offset described by an expression which starts with a PCD.
#    PcdOffset[|PcdSize] or OffsetPcdExpression|Size
#
self.__UndoToken()
RegionObj.PcdOffset = self.__GetNextPcdName()
self.Profile.PcdDict[RegionObj.PcdOffset] = "0x%08X" % (RegionObj.Offset + long(Fd.BaseAddress, 0))
FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
self.Profile.PcdFileLineDict[RegionObj.PcdOffset] = FileLineTuple
if self.__IsToken( "|"):
RegionObj.PcdSize = self.__GetNextPcdName()
self.Profile.PcdDict[RegionObj.PcdSize] = "0x%08X" % RegionObj.Size
IsRegionPcd = (RegionSizeGuidPattern.match(self.__CurrentLine()[self.CurrentOffsetWithinLine:]) or
RegionOffsetPcdPattern.match(self.__CurrentLine()[self.CurrentOffsetWithinLine:]))
if IsRegionPcd:
RegionObj.PcdOffset = self.__GetNextPcdName()
self.Profile.PcdDict[RegionObj.PcdOffset] = "0x%08X" % (RegionObj.Offset + long(Fd.BaseAddress, 0))
self.__PcdDict['%s.%s' % (RegionObj.PcdOffset[1], RegionObj.PcdOffset[0])] = "0x%x" % RegionObj.Offset
FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
self.Profile.PcdFileLineDict[RegionObj.PcdSize] = FileLineTuple
self.Profile.PcdFileLineDict[RegionObj.PcdOffset] = FileLineTuple
if self.__IsToken( "|"):
RegionObj.PcdSize = self.__GetNextPcdName()
self.Profile.PcdDict[RegionObj.PcdSize] = "0x%08X" % RegionObj.Size
self.__PcdDict['%s.%s' % (RegionObj.PcdSize[1], RegionObj.PcdSize[0])] = "0x%x" % RegionObj.Size
FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
self.Profile.PcdFileLineDict[RegionObj.PcdSize] = FileLineTuple
if not self.__GetNextWord():
return True
@ -1805,6 +1815,9 @@ class FdfParser:
self.__UndoToken()
self.__GetRegionDataType( RegionObj)
else:
self.__UndoToken()
if self.__GetRegionLayout(Fd):
return True
raise Warning("A valid region type was not found. "
"Valid types are [SET, FV, CAPSULE, FILE, DATA]. This error occurred",
self.FileName, self.CurrentLineNumber)
@ -2158,8 +2171,9 @@ class FdfParser:
# @retval None
#
def __GetFvAttributes(self, FvObj):
IsWordToken = False
while self.__GetNextWord():
IsWordToken = True
name = self.__Token
if name not in ("ERASE_POLARITY", "MEMORY_MAPPED", \
"STICKY_WRITE", "LOCK_CAP", "LOCK_STATUS", "WRITE_ENABLED_CAP", \
@ -2178,7 +2192,7 @@ class FdfParser:
FvObj.FvAttributeDict[name] = self.__Token
return True
return IsWordToken
## __GetFvNameGuid() method
#
@ -2562,22 +2576,7 @@ class FdfParser:
FfsFileObj.CurrentLineNum = self.CurrentLineNumber
FfsFileObj.CurrentLineContent = self.__CurrentLine()
FfsFileObj.FileName = self.__Token
if FfsFileObj.FileName.replace('$(WORKSPACE)', '').find('$') == -1:
#
# For file in OUTPUT_DIRECTORY will not check whether it exist or not at AutoGen phase.
#
if not GlobalData.gAutoGenPhase:
#do case sensitive check for file path
ErrorCode, ErrorInfo = PathClass(NormPath(FfsFileObj.FileName), GenFdsGlobalVariable.WorkSpaceDir).Validate()
if ErrorCode != 0:
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
else:
if not self.__GetMacroValue("OUTPUT_DIRECTORY") in FfsFileObj.FileName:
#do case sensitive check for file path
ErrorCode, ErrorInfo = PathClass(NormPath(FfsFileObj.FileName), GenFdsGlobalVariable.WorkSpaceDir).Validate()
if ErrorCode != 0:
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
self.__VerifyFile(FfsFileObj.FileName)
if not self.__IsToken( "}"):
raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
@ -2823,11 +2822,7 @@ class FdfParser:
if not self.__GetNextToken():
raise Warning("expected section file path", self.FileName, self.CurrentLineNumber)
DataSectionObj.SectFileName = self.__Token
if DataSectionObj.SectFileName.replace('$(WORKSPACE)', '').find('$') == -1:
#do case sensitive check for file path
ErrorCode, ErrorInfo = PathClass(NormPath(DataSectionObj.SectFileName), GenFdsGlobalVariable.WorkSpaceDir).Validate()
if ErrorCode != 0:
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
self.__VerifyFile(DataSectionObj.SectFileName)
else:
if not self.__GetCglSection(DataSectionObj):
return False
@ -2836,6 +2831,21 @@ class FdfParser:
return True
## __VerifyFile
#
# Check whether the file exists:
#   If the current phase is GenFds, the file must exist;
#   If the current phase is AutoGen and the file is not in $(OUTPUT_DIRECTORY), the file must exist.
# @param FileName: File path to be verified.
#
def __VerifyFile(self, FileName):
if FileName.replace('$(WORKSPACE)', '').find('$') != -1:
return
if not GlobalData.gAutoGenPhase or not self.__GetMacroValue("OUTPUT_DIRECTORY") in FileName:
ErrorCode, ErrorInfo = PathClass(NormPath(FileName), GenFdsGlobalVariable.WorkSpaceDir).Validate()
if ErrorCode != 0:
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
## __GetCglSection() method
#
# Get compressed or GUIDed section for Obj
@ -3066,12 +3076,14 @@ class FdfParser:
Value += self.__Token.strip()
elif Name == 'OEM_CAPSULE_FLAGS':
Value = self.__Token.strip()
if not Value.upper().startswith('0X'):
raise Warning("expected hex value between 0x0000 and 0xFFFF", self.FileName, self.CurrentLineNumber)
try:
Value = int(Value, 0)
except ValueError:
raise Warning("expected integer value between 0x0000 and 0xFFFF", self.FileName, self.CurrentLineNumber)
raise Warning("expected hex value between 0x0000 and 0xFFFF", self.FileName, self.CurrentLineNumber)
if not 0x0000 <= Value <= 0xFFFF:
raise Warning("expected integer value between 0x0000 and 0xFFFF", self.FileName, self.CurrentLineNumber)
raise Warning("expected hex value between 0x0000 and 0xFFFF", self.FileName, self.CurrentLineNumber)
Value = self.__Token.strip()
else:
Value = self.__Token.strip()
@ -3095,7 +3107,8 @@ class FdfParser:
IsFv = self.__GetFvStatement(Obj)
IsFd = self.__GetFdStatement(Obj)
IsAnyFile = self.__GetAnyFileStatement(Obj)
if not (IsInf or IsFile or IsFv or IsFd or IsAnyFile):
IsAfile = self.__GetAfileStatement(Obj)
if not (IsInf or IsFile or IsFv or IsFd or IsAnyFile or IsAfile):
break
## __GetFvStatement() method
@ -3187,6 +3200,47 @@ class FdfParser:
CapsuleAnyFile.FileName = AnyFileName
CapsuleObj.CapsuleDataList.append(CapsuleAnyFile)
return True
## __GetAfileStatement() method
#
# Get Afile for capsule
#
# @param self The object pointer
# @param CapsuleObj   The capsule object the Afile is added to
# @retval True        Successfully found an Afile statement
# @retval False       Not able to find an Afile statement
#
def __GetAfileStatement(self, CapsuleObj):
if not self.__IsKeyword("APPEND"):
return False
if not self.__IsToken("="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
if not self.__GetNextToken():
raise Warning("expected Afile name", self.FileName, self.CurrentLineNumber)
AfileName = self.__Token
AfileBaseName = os.path.basename(AfileName)
if os.path.splitext(AfileBaseName)[1] not in [".bin",".BIN",".Bin",".dat",".DAT",".Dat",".data",".DATA",".Data"]:
raise Warning('invalid binary file type, should be one of "bin","BIN","Bin","dat","DAT","Dat","data","DATA","Data"', \
self.FileName, self.CurrentLineNumber)
if not os.path.isabs(AfileName):
AfileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(AfileName)
self.__VerifyFile(AfileName)
else:
if not os.path.exists(AfileName):
raise Warning('%s does not exist' % AfileName, self.FileName, self.CurrentLineNumber)
else:
pass
CapsuleAfile = CapsuleData.CapsuleAfile()
CapsuleAfile.FileName = AfileName
CapsuleObj.CapsuleDataList.append(CapsuleAfile)
return True
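For context, a hypothetical FDF fragment using the statement this method parses (the path is made up):
# Inside a [Capsule.*] section of an FDF file:
#   APPEND = $(WORKSPACE)/MyPlatformPkg/Binaries/ExtraPayload.bin
# The file must carry a .bin/.dat/.data style extension; a relative path is expanded with
# ReplaceWorkspaceMacro() and then checked by __VerifyFile(), while an absolute path is
# simply required to exist.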
## __GetRule() method
#

@ -43,6 +43,8 @@ from PatchPcdValue.PatchPcdValue import PatchBinaryFile
#
#
class FfsInfStatement(FfsInfStatementClassObject):
## The mapping dictionary from datum type to the maximum value it can hold.
_MAX_SIZE_TYPE = {"BOOLEAN":0x01, "UINT8":0xFF, "UINT16":0xFFFF, "UINT32":0xFFFFFFFF, "UINT64":0xFFFFFFFFFFFFFFFF}
## The constructor
#
# @param self The object pointer
@ -204,10 +206,15 @@ class FfsInfStatement(FfsInfStatementClassObject):
if Inf._Defs != None and len(Inf._Defs) > 0:
self.OptRomDefs.update(Inf._Defs)
self.PatchPcds = []
InfPcds = Inf.Pcds
Platform = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
FdfPcdDict = GenFdsGlobalVariable.FdfParser.Profile.PcdDict
# Workaround here: both the build and GenFds tools convert the workspace path to lower case,
# but the INF file path in the FDF and DSC files may keep its original case.
# Convert the path to lower case to check whether PCD values are overridden by the DSC.
DscModules = {}
for DscModule in Platform.Modules:
DscModules[str(DscModule).lower()] = Platform.Modules[DscModule]
@ -217,6 +224,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
continue
if Pcd.Type != 'PatchableInModule':
continue
# Override Patchable PCD value by the value from DSC
PatchPcd = None
InfLowerPath = str(PathClassObj).lower()
if InfLowerPath in DscModules and PcdKey in DscModules[InfLowerPath].Pcds:
@ -227,16 +235,22 @@ class FfsInfStatement(FfsInfStatementClassObject):
if PatchPcd and Pcd.Type == PatchPcd.Type:
DefaultValue = PatchPcd.DefaultValue
DscOverride = True
# Override Patchable PCD value by the value from FDF
FdfOverride = False
if PcdKey in FdfPcdDict:
DefaultValue = FdfPcdDict[PcdKey]
FdfOverride = True
if not DscOverride and not FdfOverride:
continue
# Check the value; if the values are equal, there is no need to patch.
if Pcd.DatumType == "VOID*":
if Pcd.DefaultValue == DefaultValue or DefaultValue in [None, '']:
continue
# Get the string size from FDF or DSC
if DefaultValue[0] == 'L':
# Remove L"", but the '\0' must be appended
MaxDatumSize = str((len(DefaultValue) - 2) * 2)
elif DefaultValue[0] == '{':
MaxDatumSize = str(len(DefaultValue.split(',')))
@ -244,6 +258,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
MaxDatumSize = str(len(DefaultValue) - 1)
if DscOverride:
Pcd.MaxDatumSize = PatchPcd.MaxDatumSize
# If the maximum size is not defined in the DSC, try to get the current size from the INF.
if Pcd.MaxDatumSize in ['', None]:
Pcd.MaxDatumSize = str(len(Pcd.DefaultValue.split(',')))
else:
@ -259,6 +274,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
continue
except:
continue
# Check the Pcd size and data type
if Pcd.DatumType == "VOID*":
if int(MaxDatumSize) > int(Pcd.MaxDatumSize):
EdkLogger.error("GenFds", GENFDS_ERROR, "The size of VOID* type PCD '%s.%s' exceeds its maximum size %d bytes." \
@ -306,7 +322,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
return EfiFile
Basename = os.path.basename(EfiFile)
Output = os.path.join(self.OutputPath, Basename)
CopyLongFilePath(EfiFile, Output)
shutil.copy(EfiFile, Output)
for Pcd in self.PatchPcds:
RetVal, RetStr = PatchBinaryFile(Output, int(Pcd.Offset, 0), Pcd.DatumType, Pcd.DefaultValue, Pcd.MaxDatumSize)
if RetVal:

@ -86,6 +86,8 @@ class FV (FvClassObject):
GenFdsGlobalVariable.ErrorLogger("Capsule %s in FD region can't contain a FV %s in FD region." % (self.CapsuleName, self.UiFvName.upper()))
GenFdsGlobalVariable.InfLogger( "\nGenerating %s FV" %self.UiFvName)
GenFdsGlobalVariable.LargeFileInFvFlags.append(False)
FFSGuid = None
if self.FvBaseAddress != None:
BaseAddress = self.FvBaseAddress
@ -130,12 +132,15 @@ class FV (FvClassObject):
OrigFvInfo = None
if os.path.exists (FvInfoFileName):
OrigFvInfo = open(FvInfoFileName, 'r').read()
if GenFdsGlobalVariable.LargeFileInFvFlags[-1]:
FFSGuid = GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID;
GenFdsGlobalVariable.GenerateFirmwareVolume(
FvOutputFile,
[self.InfFileName],
AddressFile=FvInfoFileName,
FfsList=FfsFileList,
ForceRebase=self.FvForceRebase
ForceRebase=self.FvForceRebase,
FileSystemGuid=FFSGuid
)
NewFvInfo = None
@ -159,13 +164,16 @@ class FV (FvClassObject):
for FfsFile in self.FfsList :
FileName = FfsFile.GenFfs(MacroDict, FvChildAddr, BaseAddress)
if GenFdsGlobalVariable.LargeFileInFvFlags[-1]:
FFSGuid = GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID;
#Update GenFv again
GenFdsGlobalVariable.GenerateFirmwareVolume(
FvOutputFile,
[self.InfFileName],
AddressFile=FvInfoFileName,
FfsList=FfsFileList,
ForceRebase=self.FvForceRebase
ForceRebase=self.FvForceRebase,
FileSystemGuid=FFSGuid
)
#
@ -194,6 +202,7 @@ class FV (FvClassObject):
self.FvAlignment = str (FvAlignmentValue)
FvFileObj.close()
GenFds.ImageBinDict[self.UiFvName.upper() + 'fv'] = FvOutputFile
GenFdsGlobalVariable.LargeFileInFvFlags.pop()
return FvOutputFile
## __InitializeInf__()

@ -1,7 +1,7 @@
## @file
# generate flash image
#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2013, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@ -41,7 +41,7 @@ from Common.BuildVersion import gBUILD_VERSION
## Version and Copyright
versionNumber = "1.0" + ' ' + gBUILD_VERSION
__version__ = "%prog Version " + versionNumber
__copyright__ = "Copyright (c) 2007 - 2010, Intel Corporation All rights reserved."
__copyright__ = "Copyright (c) 2007 - 2013, Intel Corporation All rights reserved."
## Tool entrance method
#

@ -65,6 +65,19 @@ class GenFdsGlobalVariable:
BuildRuleFamily = "MSFT"
ToolChainFamily = "MSFT"
__BuildRuleDatabase = None
#
# The list whose elements are flags indicating whether large FFS or SECTION files exist in the FV.
# At the beginning of each FV generation, a False flag is appended to the list;
# after each call to GenerateSection returns, the size of the output file is checked,
# and if it is greater than 0xFFFFFF, the tail flag in the list is set to True
# and EFI_FIRMWARE_FILE_SYSTEM3_GUID is passed to the C GenFv tool.
# At the end of the FV generation, the flag is popped.
# The list is used as a stack to handle nested FV generation.
#
LargeFileInFvFlags = []
EFI_FIRMWARE_FILE_SYSTEM3_GUID = '5473C07A-3DCB-4dca-BD6F-1E9689E7349A'
LARGE_FILE_SIZE = 0x1000000
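A minimal sketch of the stack discipline described above, with hypothetical sizes (the names mirror the class attributes, but this is not the real call flow):
LargeFileInFvFlags = []
LARGE_FILE_SIZE = 0x1000000

LargeFileInFvFlags.append(False)       # begin generating the outer FV
LargeFileInFvFlags.append(False)       # begin generating a nested FV
section_size = 0x1200000               # GenerateSection produced an output file over 16 MB
if section_size >= LARGE_FILE_SIZE and LargeFileInFvFlags:
    LargeFileInFvFlags[-1] = True      # only the FV currently being built is flagged
use_ffs3 = LargeFileInFvFlags[-1]      # True -> pass EFI_FIRMWARE_FILE_SYSTEM3_GUID to GenFv
LargeFileInFvFlags.pop()               # nested FV finished
LargeFileInFvFlags.pop()               # outer FV finished; it was never flagged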
SectionHeader = struct.Struct("3B 1B")
@ -390,11 +403,13 @@ class GenFdsGlobalVariable:
Cmd += Input
SaveFileOnChange(CommandFile, ' '.join(Cmd), False)
if not GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
return
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
if GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate section")
GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate section")
if (os.path.getsize(Output) >= GenFdsGlobalVariable.LARGE_FILE_SIZE and
GenFdsGlobalVariable.LargeFileInFvFlags):
GenFdsGlobalVariable.LargeFileInFvFlags[-1] = True
@staticmethod
def GetAlignment (AlignString):
@ -432,7 +447,7 @@ class GenFdsGlobalVariable:
@staticmethod
def GenerateFirmwareVolume(Output, Input, BaseAddress=None, ForceRebase=None, Capsule=False, Dump=False,
AddressFile=None, MapFile=None, FfsList=[]):
AddressFile=None, MapFile=None, FfsList=[], FileSystemGuid=None):
if not GenFdsGlobalVariable.NeedsUpdate(Output, Input+FfsList):
return
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
@ -454,6 +469,8 @@ class GenFdsGlobalVariable:
Cmd += ["-a", AddressFile]
if MapFile not in [None, '']:
Cmd += ["-m", MapFile]
if FileSystemGuid:
Cmd += ["-g", FileSystemGuid]
Cmd += ["-o", Output]
for I in Input:
Cmd += ["-i", I]

@ -17,4 +17,4 @@
Build version information
'''
gBUILD_VERSION = "Build 2610"
gBUILD_VERSION = "Build 2640"

@ -1,7 +1,7 @@
## @file
# This file is used to define class for data type structure
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2013, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
@ -340,6 +340,7 @@ TAB_SLASH = '\\'
TAB_BACK_SLASH = '/'
TAB_SPECIAL_COMMENT = '##'
TAB_HEADER_COMMENT = '@file'
TAB_BINARY_HEADER_COMMENT = '@BinaryHeader'
TAB_STAR = "*"
TAB_EDK_SOURCE = '$(EDK_SOURCE)'
@ -735,7 +736,12 @@ TAB_INF_PATCH_PCD = 'PatchPcd'
TAB_INF_PCD = 'Pcd'
TAB_INF_PCD_EX = 'PcdEx'
TAB_INF_GUIDTYPE_VAR = 'Variable'
TAB_INF_ABSTRACT = 'STR_MODULE_ABSTRACT'
TAB_INF_DESCRIPTION = 'STR_MODULE_DESCRIPTION'
TAB_INF_LICENSE = 'STR_MODULE_LICENSE'
TAB_INF_BINARY_ABSTRACT = 'STR_MODULE_BINARY_ABSTRACT'
TAB_INF_BINARY_DESCRIPTION = 'STR_MODULE_BINARY_DESCRIPTION'
TAB_INF_BINARY_LICENSE = 'STR_MODULE_BINARY_LICENSE'
#
# Dec Definitions
#
@ -745,7 +751,12 @@ TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'
TAB_DEC_DEFINES_PKG_UNI_FILE = 'PKG_UNI_FILE'
TAB_DEC_PACKAGE_ABSTRACT = 'STR_PACKAGE_ABSTRACT'
TAB_DEC_PACKAGE_DESCRIPTION = 'STR_PACKAGE_DESCRIPTION'
TAB_DEC_PACKAGE_LICENSE = 'STR_PACKAGE_LICENSE'
TAB_DEC_BINARY_ABSTRACT = 'STR_PACKAGE_BINARY_ABSTRACT'
TAB_DEC_BINARY_DESCRIPTION = 'STR_PACKAGE_BINARY_DESCRIPTION'
TAB_DEC_BINARY_LICENSE = 'STR_PACKAGE_ASBUILT_LICENSE'
#
# Dsc Definitions
#
@ -814,6 +825,8 @@ TAB_HEADER_ABSTRACT = 'Abstract'
TAB_HEADER_DESCRIPTION = 'Description'
TAB_HEADER_COPYRIGHT = 'Copyright'
TAB_HEADER_LICENSE = 'License'
TAB_BINARY_HEADER_IDENTIFIER = 'BinaryHeader'
TAB_BINARY_HEADER_USERID = 'TianoCore'
#
# Build database path
#

@ -1,7 +1,7 @@
## @file
# This file is used to define strings used in the UPT tool
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2013, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
@ -42,7 +42,7 @@ MSG_USAGE_STRING = _("\n"
MSG_VERSION_NUMBER = _("1.0")
MSG_VERSION = _("Intel(r) UEFI Packaging Tool (Intel(r) UEFIPT) - Revision " + \
MSG_VERSION_NUMBER)
MSG_COPYRIGHT = _("Copyright (c) 2011 Intel Corporation All Rights Reserved.")
MSG_COPYRIGHT = _("Copyright (c) 2011 - 2013 Intel Corporation All Rights Reserved.")
MSG_VERSION_COPYRIGHT = _("\n %s\n %s" % (MSG_VERSION, MSG_COPYRIGHT))
MSG_USAGE = _("%s [options]\n%s" % ("upt.exe", MSG_VERSION_COPYRIGHT))
MSG_DESCRIPTION = _("The Intel(r) UEFIUPT is used to create, " + \
@ -530,6 +530,10 @@ ERR_COPYRIGHT_MISSING = \
_("Header comment section must have copyright information")
ERR_LICENSE_MISSING = \
_("Header comment section must have license information")
ERR_INVALID_BINARYHEADER_FORMAT = \
_("Binary Header comment section must have abstract,description,copyright,license information")
ERR_MULTIPLE_BINARYHEADER_EXIST = \
_("the inf file at most support one BinaryHeader at the fileheader section.")
ERR_INVALID_COMMENT_FORMAT = _("Comment must start with #")
ERR_USER_ABORT = _("User has stopped the application")
ERR_DIST_EXT_ERROR = \
@ -547,6 +551,8 @@ ERR_INSTALL_FILE_DEC_FILE_ERROR = _("Could not obtain the TokenSpaceGuidCName an
ERR_NOT_SUPPORTED_SA_MODULE = _("Stand-alone module distribution does not allow EDK 1 INF")
ERR_INSTALL_DIST_NOT_FOUND = \
_("Distribution file to be installed is not found in current working directory or workspace: %s")
ERR_BINARY_HEADER_ORDER = _("Binary header must follow the file header.")
ERR_NO_SOURCE_HEADER = _("File header statement \"## @file\" must exist at the first place.")
#
# Expression error message

@ -1,7 +1,7 @@
## @file
# This file is used to parse DEC file. It will consumed by DecParser
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2013, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
@ -19,6 +19,7 @@ import Logger.Log as Logger
from Logger.ToolError import FILE_PARSE_FAILURE
from Logger.ToolError import FILE_OPEN_FAILURE
from Logger import StringTable as ST
from Logger.ToolError import FORMAT_INVALID
import Library.DataType as DT
from Library.ParserValidate import IsValidToken
@ -735,6 +736,7 @@ class Dec(_DecBase, _DecComments):
_DecComments.__init__(self)
_DecBase.__init__(self, RawData)
self.BinaryHeadComment = []
self._Define = _DecDefine(RawData)
self._Include = _DecInclude(RawData)
self._Guid = _DecGuid(RawData)
@ -778,8 +780,13 @@ class Dec(_DecBase, _DecComments):
# Parse DEC file
#
def ParseDecComment(self):
IsFileHeader = False
IsBinaryHeader = False
FileHeaderLineIndex = -1
BinaryHeaderLineIndex = -1
while not self._RawData.IsEndOfFile():
Line, Comment = CleanString(self._RawData.GetNextLine())
#
# Header must be pure comment
#
@ -787,14 +794,55 @@ class Dec(_DecBase, _DecComments):
self._RawData.UndoNextLine()
break
if Comment:
if Comment and Comment.startswith(DT.TAB_SPECIAL_COMMENT) and Comment.find(DT.TAB_HEADER_COMMENT) > 0 \
and not Comment[2:Comment.find(DT.TAB_HEADER_COMMENT)].strip():
IsFileHeader = True
IsBinaryHeader = False
FileHeaderLineIndex = self._RawData.LineIndex
#
# Get license information before '@file'
#
if not IsFileHeader and not IsBinaryHeader and Comment and Comment.startswith(DT.TAB_COMMENT_SPLIT) and \
DT.TAB_BINARY_HEADER_COMMENT not in Comment:
self._HeadComment.append((Comment, self._RawData.LineIndex))
if Comment and IsFileHeader and \
not(Comment.startswith(DT.TAB_SPECIAL_COMMENT) \
and Comment.find(DT.TAB_BINARY_HEADER_COMMENT) > 0):
self._HeadComment.append((Comment, self._RawData.LineIndex))
#
# Double '#' indicates end of header comments
#
if not Comment or Comment == DT.TAB_SPECIAL_COMMENT:
if (not Comment or Comment == DT.TAB_SPECIAL_COMMENT) and IsFileHeader:
IsFileHeader = False
continue
if Comment and Comment.startswith(DT.TAB_SPECIAL_COMMENT) \
and Comment.find(DT.TAB_BINARY_HEADER_COMMENT) > 0:
IsBinaryHeader = True
IsFileHeader = False
BinaryHeaderLineIndex = self._RawData.LineIndex
if Comment and IsBinaryHeader:
self.BinaryHeadComment.append((Comment, self._RawData.LineIndex))
#
# Double '#' indicates end of header comments
#
if (not Comment or Comment == DT.TAB_SPECIAL_COMMENT) and IsBinaryHeader:
IsBinaryHeader = False
break
if FileHeaderLineIndex > -1 and not IsFileHeader and not IsBinaryHeader:
break
if FileHeaderLineIndex > BinaryHeaderLineIndex and FileHeaderLineIndex > -1 and BinaryHeaderLineIndex > -1:
self._LoggerError(ST.ERR_BINARY_HEADER_ORDER)
if FileHeaderLineIndex == -1:
Logger.Error(TOOL_NAME, FORMAT_INVALID,
ST.ERR_NO_SOURCE_HEADER,
File=self._RawData.Filename)
return
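For orientation, a hypothetical DEC header layout the logic above accepts: the '## @file' block must come first, an optional '## @BinaryHeader' block may follow it, and a bare '##' (or any non-comment line) closes each block; a binary header appearing before the file header triggers ERR_BINARY_HEADER_ORDER, and a missing '## @file' triggers ERR_NO_SOURCE_HEADER. The text below is a made-up example, not taken from any real package.
## @file
# ExamplePkg package declaration (hypothetical).
# Copyright (c) 2014, Example Vendor. All rights reserved.<BR>
# License text or reference goes here.
##
## @BinaryHeader
# Abstract, description, copyright, and license for the binary distribution.
##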
def _StopCurrentParsing(self, Line):

@ -1,7 +1,7 @@
## @file
# This file contained the parser for INF file
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2013, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
@ -128,6 +128,10 @@ class InfParser(InfSectionParser):
#
HeaderCommentStart = False
HeaderCommentEnd = False
HeaderStarLineNo = -1
BinaryHeaderCommentStart = False
BinaryHeaderCommentEnd = False
BinaryHeaderStarLineNo = -1
#
# While Section ends. parse whole section contents.
@ -196,22 +200,16 @@ class InfParser(InfSectionParser):
#
if Line.startswith(DT.TAB_SPECIAL_COMMENT) and \
(Line.find(DT.TAB_HEADER_COMMENT) > -1) and \
not HeaderCommentStart:
if CurrentSection != DT.MODEL_UNKNOWN:
Logger.Error("Parser",
PARSER_ERROR,
ST.ERR_INF_PARSER_HEADER_FILE,
File=Filename,
Line=LineNo,
RaiseError = Logger.IS_RAISE_ERROR)
else:
CurrentSection = DT.MODEL_META_DATA_FILE_HEADER
#
# Append the first line to section lines.
#
SectionLines.append((Line, LineNo))
HeaderCommentStart = True
continue
not HeaderCommentStart and not HeaderCommentEnd:
CurrentSection = DT.MODEL_META_DATA_FILE_HEADER
#
# Append the first line to section lines.
#
HeaderStarLineNo = LineNo
SectionLines.append((Line, LineNo))
HeaderCommentStart = True
continue
#
# Collect Header content.
@ -226,17 +224,72 @@ class InfParser(InfSectionParser):
#
if (Line.startswith(DT.TAB_SPECIAL_COMMENT) or not Line.strip().startswith("#")) and HeaderCommentStart \
and not HeaderCommentEnd:
SectionLines.append((Line, LineNo))
HeaderCommentEnd = True
BinaryHeaderCommentStart = False
BinaryHeaderCommentEnd = False
HeaderCommentStart = False
if Line.find(DT.TAB_BINARY_HEADER_COMMENT) > -1:
self.InfHeaderParser(SectionLines, self.InfHeader, self.FileName)
SectionLines = []
else:
SectionLines.append((Line, LineNo))
#
# Call Header comment parser.
#
self.InfHeaderParser(SectionLines, self.InfHeader, self.FileName)
SectionLines = []
continue
#
# check whether binary header comment section started
#
if Line.startswith(DT.TAB_SPECIAL_COMMENT) and \
(Line.find(DT.TAB_BINARY_HEADER_COMMENT) > -1) and \
not BinaryHeaderCommentStart:
SectionLines = []
CurrentSection = DT.MODEL_META_DATA_FILE_HEADER
#
# Append the first line to section lines.
#
BinaryHeaderStarLineNo = LineNo
SectionLines.append((Line, LineNo))
BinaryHeaderCommentStart = True
HeaderCommentEnd = True
continue
#
# check whether more than one binary header exists
#
if Line.startswith(DT.TAB_SPECIAL_COMMENT) and BinaryHeaderCommentStart and \
not BinaryHeaderCommentEnd and (Line.find(DT.TAB_BINARY_HEADER_COMMENT) > -1):
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_MULTIPLE_BINARYHEADER_EXIST,
File=Filename)
#
# Collect Binary Header content.
#
if (Line.startswith(DT.TAB_COMMENT_SPLIT) and CurrentSection == DT.MODEL_META_DATA_FILE_HEADER) and\
BinaryHeaderCommentStart and not Line.startswith(DT.TAB_SPECIAL_COMMENT) and not\
BinaryHeaderCommentEnd and NextLine != '':
SectionLines.append((Line, LineNo))
continue
#
# Binary Header content end
#
if (Line.startswith(DT.TAB_SPECIAL_COMMENT) or not Line.strip().startswith(DT.TAB_COMMENT_SPLIT)) and \
BinaryHeaderCommentStart and not BinaryHeaderCommentEnd:
SectionLines.append((Line, LineNo))
BinaryHeaderCommentStart = False
#
# Call Binary Header comment parser.
#
self.InfHeaderParser(SectionLines, self.InfBinaryHeader, self.FileName, True)
SectionLines = []
BinaryHeaderCommentEnd = True
continue
#
# Find a new section tab
# Or at the last line of INF file,
# need to process the last section.
@ -255,6 +308,10 @@ class InfParser(InfSectionParser):
#
if (Line.startswith(DT.TAB_SECTION_START) and \
Line.find(DT.TAB_SECTION_END) > -1) or LastSectionFalg:
HeaderCommentEnd = True
BinaryHeaderCommentEnd = True
if not LastSectionFalg:
#
# check to prevent '#' inside section header
@ -333,18 +390,17 @@ class InfParser(InfSectionParser):
# Clear section lines
#
SectionLines = []
#
# End of for
#
#
# Found the first section, No file header.
#
if DefineSectionParsedFlag and not HeaderCommentEnd:
if HeaderStarLineNo == -1:
Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_HEADER_MISSGING,
ST.ERR_NO_SOURCE_HEADER,
File=self.FullPath)
if BinaryHeaderStarLineNo > -1 and HeaderStarLineNo > -1 and HeaderStarLineNo > BinaryHeaderStarLineNo:
Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_BINARY_HEADER_ORDER,
File=self.FullPath)
#
# EDKII INF should not have EDKI style comment
#
@ -627,4 +683,4 @@ def _ConvertSecNameToType(SectionName):
SectionType = gINF_SECTION_DEF[SectionName.upper()]
return SectionType

@ -1,7 +1,7 @@
## @file
# This file is used to parse a PCD file of .PKG file
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2013, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
@ -357,8 +357,10 @@ class MiscellaneousFileXml(object):
Executable = XmlAttribute(XmlNode(SubItem, '%s/Filename' % Key), 'Executable')
if Executable.upper() == "TRUE":
Executable = True
else:
elif Executable.upper() == "FALSE":
Executable = False
else:
Executable = ''
self.Files.append([Filename, Executable])
MiscFile = MiscFileObject()
@ -392,8 +394,10 @@ class MiscellaneousFileXml(object):
OsType = XmlAttribute(XmlNode(SubItem, '%s/Filename' % Key), 'OS')
if Executable.upper() == "TRUE":
Executable = True
else:
elif Executable.upper() == "FALSE":
Executable = False
else:
Executable = ''
self.Files.append([Filename, Executable, OsType])
MiscFile = MiscFileObject()

@ -278,7 +278,7 @@ class MetaFileParser(object):
for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
if Item == '':
continue
ItemList = GetSplitValueList(Item, TAB_SPLIT)
ItemList = GetSplitValueList(Item, TAB_SPLIT,2)
# different section should not mix in one section
if self._SectionName != '' and self._SectionName != ItemList[0].upper():
EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section",
@ -305,7 +305,10 @@ class MetaFileParser(object):
# S2 may be Platform or ModuleType
if len(ItemList) > 2:
S2 = ItemList[2].upper()
if self._SectionName.upper() in SECTIONS_HAVE_ITEM_PCD:
S2 = ItemList[2]
else:
S2 = ItemList[2].upper()
else:
S2 = 'COMMON'
self._Scope.append([S1, S2])
@ -496,6 +499,8 @@ class InfParser(MetaFileParser):
# parse the file line by line
IsFindBlockComment = False
GetHeaderComment = False
TailComments = []
SectionComments = []
Comments = []
for Index in range(0, len(Content)):
@ -507,6 +512,9 @@ class InfParser(MetaFileParser):
if Line == '':
if Comment:
Comments.append((Comment, Index + 1))
elif GetHeaderComment:
SectionComments.extend(Comments)
Comments = []
continue
if Line.find(DataType.TAB_COMMENT_EDK_START) > -1:
IsFindBlockComment = True
@ -527,6 +535,8 @@ class InfParser(MetaFileParser):
self._Store(MODEL_META_DATA_HEADER_COMMENT, Cmt, '', '', 'COMMON',
'COMMON', self._Owner[-1], LNo, -1, LNo, -1, 0)
GetHeaderComment = True
else:
TailComments.extend(SectionComments + Comments)
Comments = []
self._SectionHeaderParser()
# Check invalid sections
@ -602,9 +612,16 @@ class InfParser(MetaFileParser):
self._Store(MODEL_META_DATA_COMMENT, Comment, '', '', Arch, Platform,
LastItem, LineNo, -1, LineNo, -1, 0)
Comments = []
SectionComments = []
TailComments.extend(SectionComments + Comments)
if IsFindBlockComment:
EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
File=self.MetaFile)
# If there are tail comments in the INF file, save them to the database regardless of their content
for Comment in TailComments:
self._Store(MODEL_META_DATA_TAIL_COMMENT, Comment[0], '', '', 'COMMON',
'COMMON', self._Owner[-1], -1, -1, -1, -1, 0)
self._Done()
## Data parser for the format in which there's path

@ -1,7 +1,7 @@
## @file
# This file is used to create a database used by build tool
#
# Copyright (c) 2008 - 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@ -36,6 +36,7 @@ from MetaFileParser import *
from BuildClassObject import *
from WorkspaceCommon import GetDeclaredPcd
from Common.Misc import AnalyzeDscPcd
import re
## Platform build information from DSC file
#
@ -328,6 +329,8 @@ class DscBuildData(PlatformBuildClassObject):
return False
def _GetSkuIdentifier(self):
if self._SkuName:
return self._SkuName
if self._SkuIdentifier == None:
if self._Header == None:
self._GetHeaderInfo()
@ -337,16 +340,14 @@ class DscBuildData(PlatformBuildClassObject):
if self._SkuName == None:
if self._Header == None:
self._GetHeaderInfo()
if self._SkuName == None or self._SkuName not in self.SkuIds:
if (self._SkuName == None or self._SkuName not in self.SkuIds):
self._SkuName = 'DEFAULT'
return self._SkuName
## Override SKUID_IDENTIFIER
def _SetSkuName(self, Value):
if Value in self.SkuIds:
self._SkuName = Value
# Needs to re-retrieve the PCD information
self._Pcds = None
self._SkuName = Value
self._Pcds = None
def _GetFdfFile(self):
if self._FlashDefinition == None:
@ -460,7 +461,7 @@ class DscBuildData(PlatformBuildClassObject):
if Record[1] in [None, '']:
EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID name',
File=self.MetaFile, Line=Record[-1])
self._SkuIds[Record[1].upper()] = Record[0]
self._SkuIds[Record[1]] = Record[0]
if 'DEFAULT' not in self._SkuIds:
self._SkuIds['DEFAULT'] = '0'
if 'COMMON' not in self._SkuIds:
@ -731,7 +732,6 @@ class DscBuildData(PlatformBuildClassObject):
RecordList = self._RawData[Type, self._Arch]
PcdValueDict = sdict()
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
SkuName = SkuName.upper()
if SkuName in (SkuObj.SystemSkuId,'DEFAULT','COMMON'):
PcdSet.add((PcdCName, TokenSpaceGuid, SkuName,Dummy4))
PcdDict[Arch, PcdCName, TokenSpaceGuid,SkuName] = Setting
@ -798,7 +798,6 @@ class DscBuildData(PlatformBuildClassObject):
AvailableSkuIdSet.update({'DEFAULT':0,'COMMON':0})
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
SkuName = SkuName.upper()
if SkuName not in AvailableSkuIdSet:
continue
@ -816,6 +815,16 @@ class DscBuildData(PlatformBuildClassObject):
if (PcdCName,TokenSpaceGuid) in Pcds.keys():
pcdObject = Pcds[PcdCName,TokenSpaceGuid]
pcdObject.SkuInfoList[SkuName] = SkuInfo
if MaxDatumSize.strip():
CurrentMaxSize = int(MaxDatumSize.strip(),0)
else:
CurrentMaxSize = 0
if pcdObject.MaxDatumSize:
PcdMaxSize = int(pcdObject.MaxDatumSize,0)
else:
PcdMaxSize = 0
if CurrentMaxSize > PcdMaxSize:
pcdObject.MaxDatumSize = str(CurrentMaxSize)
else:
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
PcdCName,
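The new MaxDatumSize handling in this hunk grows the recorded size when another setting for the same PCD declares a larger maximum. A standalone sketch of that comparison (helper name is illustrative only):
def UpdateMaxDatumSize(CurrentMaxDatumSize, NewMaxDatumSize):
    # Sizes are numeric strings ('' meaning unset), as in the DSC records;
    # base 0 accepts both decimal and 0x-prefixed hex.
    NewSize = int(NewMaxDatumSize.strip(), 0) if NewMaxDatumSize.strip() else 0
    OldSize = int(CurrentMaxDatumSize, 0) if CurrentMaxDatumSize else 0
    return str(NewSize) if NewSize > OldSize else CurrentMaxDatumSize

print(UpdateMaxDatumSize('16', '0x20'))  # -> '32'
print(UpdateMaxDatumSize('16', '8'))     # -> '16'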
@ -831,8 +840,8 @@ class DscBuildData(PlatformBuildClassObject):
)
for pcd in Pcds.values():
if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
valuefromDec = pcdDecObject.DefaultValue
SkuInfo = SkuInfoClass('DEFAULT', '0', '', '', '', '', '', valuefromDec)
pcd.SkuInfoList['DEFAULT'] = SkuInfo
@ -845,17 +854,7 @@ class DscBuildData(PlatformBuildClassObject):
if 'DEFAULT' in pcd.SkuInfoList.keys() and SkuObj.SystemSkuId not in pcd.SkuInfoList.keys():
pcd.SkuInfoList[SkuObj.SystemSkuId] = pcd.SkuInfoList['DEFAULT']
del(pcd.SkuInfoList['DEFAULT'])
if SkuObj.SkuUsageType == SkuObj.MULTIPLE:
if pcd.DatumType == "VOID*":
MaxSize = int(pcd.MaxDatumSize,0)
for (skuname,skuobj) in pcd.SkuInfoList.items():
datalen = len(skuobj.DefaultValue)
if datalen>MaxSize:
MaxSize = datalen
pcd.MaxDatumSize = str(MaxSize)
return Pcds
## Retrieve dynamic HII PCD settings
@ -881,7 +880,6 @@ class DscBuildData(PlatformBuildClassObject):
AvailableSkuIdSet.update({'DEFAULT':0,'COMMON':0})
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
SkuName = SkuName.upper()
if SkuName not in AvailableSkuIdSet:
continue
PcdSet.add((PcdCName, TokenSpaceGuid, SkuName,Dummy4))
@ -893,6 +891,20 @@ class DscBuildData(PlatformBuildClassObject):
if Setting == None:
continue
VariableName, VariableGuid, VariableOffset, DefaultValue = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
ExceedMax = False
if VariableOffset.isdigit():
if int(VariableOffset,10) > 0xFFFF:
ExceedMax = True
elif re.match(r'[\t\s]*0[xX][a-fA-F0-9]+$',VariableOffset):
if int(VariableOffset,16) > 0xFFFF:
ExceedMax = True
else:
EdkLogger.error('Build', FORMAT_INVALID, "The syntax or format of the variable offset value is incorrect for %s." % ".".join((TokenSpaceGuid,PcdCName)))
if ExceedMax:
EdkLogger.error('Build', OPTION_VALUE_INVALID, "The variable offset value must not exceed the maximum value of 0xFFFF (UINT16) for %s." % ".".join((TokenSpaceGuid,PcdCName)))
SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName], VariableName, VariableGuid, VariableOffset, DefaultValue)
if (PcdCName,TokenSpaceGuid) in Pcds.keys():
pcdObject = Pcds[PcdCName,TokenSpaceGuid]
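The offset validation added above accepts a decimal or 0x-prefixed hexadecimal variable offset and rejects anything larger than 0xFFFF. A standalone sketch of the same checks (helper name and return values are illustrative):
import re

def CheckVariableOffset(VariableOffset):
    # Decimal or 0x-prefixed hex is accepted; the value must fit in UINT16
    if VariableOffset.isdigit():
        Value = int(VariableOffset, 10)
    elif re.match(r'[\t\s]*0[xX][a-fA-F0-9]+$', VariableOffset):
        Value = int(VariableOffset, 16)
    else:
        return 'invalid syntax'
    return 'exceeds 0xFFFF' if Value > 0xFFFF else 'ok'

print(CheckVariableOffset('0x0040'))   # -> ok
print(CheckVariableOffset('0x10000'))  # -> exceeds 0xFFFF
print(CheckVariableOffset('40h'))      # -> invalid syntax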
@ -914,8 +926,12 @@ class DscBuildData(PlatformBuildClassObject):
for pcd in Pcds.values():
SkuInfoObj = pcd.SkuInfoList.values()[0]
if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
# Only fix the value when no value is provided in the DSC file.
for sku in pcd.SkuInfoList.values():
if (sku.HiiDefaultValue == "" or sku.HiiDefaultValue==None):
sku.HiiDefaultValue = pcdDecObject.DefaultValue
if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
valuefromDec = pcdDecObject.DefaultValue
SkuInfo = SkuInfoClass('DEFAULT', '0', SkuInfoObj.VariableName, SkuInfoObj.VariableGuid, SkuInfoObj.VariableOffset, valuefromDec)
pcd.SkuInfoList['DEFAULT'] = SkuInfo
@ -929,6 +945,24 @@ class DscBuildData(PlatformBuildClassObject):
if 'DEFAULT' in pcd.SkuInfoList.keys() and SkuObj.SystemSkuId not in pcd.SkuInfoList.keys():
pcd.SkuInfoList[SkuObj.SystemSkuId] = pcd.SkuInfoList['DEFAULT']
del(pcd.SkuInfoList['DEFAULT'])
if pcd.MaxDatumSize.strip():
MaxSize = int(pcd.MaxDatumSize,0)
else:
MaxSize = 0
if pcdDecObject.DatumType == 'VOID*':
for (skuname,skuobj) in pcd.SkuInfoList.items():
datalen = 0
if skuobj.HiiDefaultValue.startswith("L"):
datalen = (len(skuobj.HiiDefaultValue)- 3 + 1) * 2
elif skuobj.HiiDefaultValue.startswith("{"):
datalen = len(skuobj.HiiDefaultValue.split(","))
else:
datalen = len(skuobj.HiiDefaultValue) -2 + 1
if datalen>MaxSize:
MaxSize = datalen
pcd.MaxDatumSize = str(MaxSize)
return Pcds
## Retrieve dynamic VPD PCD settings
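The VOID* sizing loop above estimates each SKU's HII default-value size — L"..." strings as UCS-2 characters plus a terminating NUL, {..} arrays by their comma-separated element count, and plain "..." strings as characters plus a NUL — and keeps the largest. A standalone sketch of that estimate:
def HiiDefaultValueSize(HiiDefaultValue):
    # The -3/-2 terms strip the surrounding L".." / ".." delimiters
    if HiiDefaultValue.startswith('L'):
        return (len(HiiDefaultValue) - 3 + 1) * 2   # UCS-2: 2 bytes per char + NUL
    if HiiDefaultValue.startswith('{'):
        return len(HiiDefaultValue.split(','))      # one byte per array element
    return len(HiiDefaultValue) - 2 + 1             # ASCII chars + NUL

print(HiiDefaultValueSize('L"Setup"'))      # -> 12 (5 chars + NUL, 2 bytes each)
print(HiiDefaultValueSize('{0x01, 0x02}'))  # -> 2
print(HiiDefaultValueSize('"Setup"'))       # -> 6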
@ -954,7 +988,6 @@ class DscBuildData(PlatformBuildClassObject):
AvailableSkuIdSet.update({'DEFAULT':0,'COMMON':0})
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
SkuName = SkuName.upper()
if SkuName not in AvailableSkuIdSet:
continue
@ -976,6 +1009,16 @@ class DscBuildData(PlatformBuildClassObject):
if (PcdCName,TokenSpaceGuid) in Pcds.keys():
pcdObject = Pcds[PcdCName,TokenSpaceGuid]
pcdObject.SkuInfoList[SkuName] = SkuInfo
if MaxDatumSize.strip():
CurrentMaxSize = int(MaxDatumSize.strip(),0)
else:
CurrentMaxSize = 0
if pcdObject.MaxDatumSize:
PcdMaxSize = int(pcdObject.MaxDatumSize,0)
else:
PcdMaxSize = 0
if CurrentMaxSize > PcdMaxSize:
pcdObject.MaxDatumSize = str(CurrentMaxSize)
else:
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
PcdCName,
@ -991,8 +1034,8 @@ class DscBuildData(PlatformBuildClassObject):
)
for pcd in Pcds.values():
SkuInfoObj = pcd.SkuInfoList.values()[0]
pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
valuefromDec = pcdDecObject.DefaultValue
SkuInfo = SkuInfoClass('DEFAULT', '0', '', '', '','',SkuInfoObj.VpdOffset, valuefromDec)
pcd.SkuInfoList['DEFAULT'] = SkuInfo
@ -1006,14 +1049,6 @@ class DscBuildData(PlatformBuildClassObject):
pcd.SkuInfoList[SkuObj.SystemSkuId] = pcd.SkuInfoList['DEFAULT']
del(pcd.SkuInfoList['DEFAULT'])
if SkuObj.SkuUsageType == SkuObj.MULTIPLE:
if pcd.MaxDatumSize.strip():
MaxSize = int(pcd.MaxDatumSize,0)
for (skuname,skuobj) in pcd.SkuInfoList.items():
datalen = len(skuobj.DefaultValue)
if datalen>MaxSize:
MaxSize = datalen
pcd.MaxDatumSize = str(MaxSize)
return Pcds
## Add external modules
@ -1520,6 +1555,7 @@ class InfBuildData(ModuleBuildClassObject):
## Set all internal used members of InfBuildData to None
def _Clear(self):
self._HeaderComments = None
self._TailComments = None
self._Header_ = None
self._AutoGenVersion = None
self._BaseName = None
@ -1612,7 +1648,13 @@ class InfBuildData(ModuleBuildClassObject):
for Record in RecordList:
self._HeaderComments.append(Record[0])
return self._HeaderComments
def _GetTailComments(self):
if not self._TailComments:
self._TailComments = []
RecordList = self._RawData[MODEL_META_DATA_TAIL_COMMENT]
for Record in RecordList:
self._TailComments.append(Record[0])
return self._TailComments
## Retrieve all information in [Defines] section
#
# (Retrieving all [Defines] information in one shot is just to save time.)
@ -1769,7 +1811,7 @@ class InfBuildData(ModuleBuildClassObject):
else:
Tool = ToolList[0]
ToolChain = "*_*_*_%s_FLAGS" % Tool
ToolChainFamily = 'MSFT' # Edk.x only support MSFT tool chain
ToolChainFamily = ''
# ignore macros that were not replaced in the value
ValueList = GetSplitList(' ' + Value, '/D')
Dummy = ValueList[0]
@ -2447,6 +2489,7 @@ class InfBuildData(ModuleBuildClassObject):
Platform = property(_GetPlatform, _SetPlatform)
HeaderComments = property(_GetHeaderComments)
TailComments = property(_GetTailComments)
AutoGenVersion = property(_GetInfVersion)
BaseName = property(_GetBaseName)
ModuleType = property(_GetModuleType)

View File

@ -259,7 +259,7 @@ class DepexParser(object):
Statement = gOpCodeList[struct.unpack("B", OpCode)[0]]
if Statement in ["BEFORE", "AFTER", "PUSH"]:
GuidValue = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" % \
struct.unpack("LHHBBBBBBBB", DepexFile.read(16))
struct.unpack("=LHHBBBBBBBB", DepexFile.read(16))
GuidString = self._GuidDb.get(GuidValue, GuidValue)
Statement = "%s %s" % (Statement, GuidString)
DepexStatement.append(Statement)
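The '=' prefix added to the unpack format forces standard field sizes with no native padding, so the 16 GUID bytes always decode as one UINT32, two UINT16s and eight UINT8s; without it, 'L' takes the platform's unsigned long size (8 bytes on LP64) and mis-reads the stream. A small self-contained illustration:
import struct

def FormatGuid(RawBytes):
    # '=' pins 'L' to exactly 4 bytes and disables alignment padding,
    # so the 16 raw bytes always map to L H H B*8.
    assert len(RawBytes) == 16
    Fields = struct.unpack("=LHHBBBBBBBB", RawBytes)
    return "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" % Fields

# Example bytes packed with the same layout (values chosen for illustration)
Raw = struct.pack("=LHHBBBBBBBB", 0x12345678, 0x9ABC, 0xDEF0,
                  1, 2, 3, 4, 5, 6, 7, 8)
print(FormatGuid(Raw))  # -> 12345678-9ABC-DEF0-0102-030405060708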