Update ApiEntry.asm to use MACRO instead of direct XMM access.
Add sanity parameter check for FSP API.
Add sanity return code check for internal API.
Call LoadUcode before CarInit to meet silicon requirement.
Remove unnecessary VpdBase for PatchTable.
Add ASSERT for NULL check FSP1.1 entrypoint.

Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: "Yao, Jiewen" <jiewen.yao@intel.com>
Reviewed-by: "Rangarajan, Ravi P" <ravi.p.rangarajan@intel.com>
Reviewed-by: "Ma, Maurice" <maurice.ma@intel.com>
Reviewed-by: "Mudusuru, Giri P" <giri.p.mudusuru@intel.com>

git-svn-id: https://svn.code.sf.net/p/edk2/code/trunk/edk2@16834 6f19259b-4bc3-4df7-8a09-765794883524
## @ GenCfgOpt.py
#
# Copyright (c) 2014 - 2015, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials are licensed and made available under
# the terms and conditions of the BSD License that accompanies this distribution.
# The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##

import os
import re
import sys
import struct
from datetime import date

# Generated file copyright header

__copyright_txt__ = """## @file
#
# THIS IS AUTO-GENERATED FILE BY BUILD TOOLS AND PLEASE DO NOT MAKE MODIFICATION.
#
# This file lists all VPD informations for a platform collected by build.exe.
#
# Copyright (c) %4d, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
"""

__copyright_bsf__ = """/** @file

Boot Setting File for Platform Configuration.

Copyright (c) %4d, Intel Corporation. All rights reserved.<BR>
This program and the accompanying materials
are licensed and made available under the terms and conditions of the BSD License
which accompanies this distribution. The full text of the license may be found at
http://opensource.org/licenses/bsd-license.php

THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

This file is automatically generated. Please do NOT modify !!!

**/

"""

__copyright_h__ = """/** @file

Copyright (c) %4d, Intel Corporation. All rights reserved.<BR>

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
  list of conditions and the following disclaimer in the documentation and/or
  other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors may
  be used to endorse or promote products derived from this software without
  specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.

This file is automatically generated. Please do NOT modify !!!

**/

"""

class CGenCfgOpt:
    def __init__(self):
        self.Error = ''

        self._GlobalDataDef = """
GlobalDataDef
    SKUID = 0, "DEFAULT"
EndGlobalData

"""
        self._BuidinOptionTxt = """
List &EN_DIS
    Selection 0x1 , "Enabled"
    Selection 0x0 , "Disabled"
EndList

"""

        self._BsfKeyList = ['FIND', 'NAME', 'HELP', 'TYPE', 'PAGE', 'OPTION', 'ORDER']
        self._HdrKeyList = ['HEADER', 'STRUCT']
        self._BuidinOption = {'$EN_DIS' : 'EN_DIS'}

        self._MacroDict = {}
        self._CfgBlkDict = {}
        self._CfgPageDict = {}
        self._CfgItemList = []
        self._DscFile = ''
        self._FvDir = ''
        self._MapVer = 0

    def ParseMacros (self, MacroDefStr):
        # ['-DABC=1', '-D', 'CFG_DEBUG=1', '-D', 'CFG_OUTDIR=Build']
        self._MacroDict = {}
        IsExpression = False
        for Macro in MacroDefStr:
            if Macro.startswith('-D'):
                IsExpression = True
                if len(Macro) > 2:
                    Macro = Macro[2:]
                else:
                    continue
            if IsExpression:
                IsExpression = False
                Match = re.match("(\w+)=(.+)", Macro)
                if Match:
                    self._MacroDict[Match.group(1)] = Match.group(2)
                else:
                    Match = re.match("(\w+)", Macro)
                    if Match:
                        self._MacroDict[Match.group(1)] = ''
        if len(self._MacroDict) == 0:
            self.Error = "Invalid MACRO arguments"
            Error = 1
        else:
            Error = 0
        return Error

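    # Illustrative note (not a definitive spec): with the sample argument list in
    # the comment above, ParseMacros would fill _MacroDict roughly as
    #   ['-DABC=1', '-D', 'CFG_DEBUG=1', '-D', 'CFG_OUTDIR=Build']
    #     -> {'ABC': '1', 'CFG_DEBUG': '1', 'CFG_OUTDIR': 'Build'}
    # A macro passed without a value (e.g. '-DXYZ') maps to an empty string.
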
    def ParseDscFile (self, DscFile, FvDir):
        self._CfgItemList = []
        self._CfgPageDict = {}
        self._CfgBlkDict = {}
        self._DscFile = DscFile
        self._FvDir = FvDir

        IsDefSect = False
        IsUpdSect = False
        IsVpdSect = False
        Found = False

        IfStack = [True]
        ElifStack = []
        Error = 0

        DscFd = open(DscFile, "r")
        DscLines = DscFd.readlines()
        DscFd.close()

        ConfigDict = {}

        for DscLine in DscLines:
            Handle = False
            DscLine = DscLine.strip()
            Match = re.match("^\[(.+)\]", DscLine)
            if Match is not None:
                if Match.group(1).lower() == "Defines".lower():
                    IsDefSect = True
                    IsVpdSect = False
                    IsUpdSect = False
                elif Match.group(1).lower() == "PcdsDynamicVpd".lower():
                    ConfigDict = {}
                    ConfigDict['header'] = 'ON'
                    ConfigDict['region'] = 'VPD'
                    ConfigDict['order'] = -1
                    ConfigDict['page'] = ''
                    ConfigDict['name'] = ''
                    ConfigDict['find'] = ''
                    ConfigDict['struct'] = ''
                    ConfigDict['subreg'] = []
                    IsDefSect = False
                    IsVpdSect = True
                    IsUpdSect = False
                elif Match.group(1).lower() == "PcdsDynamicVpd.Upd".lower():
                    ConfigDict = {}
                    ConfigDict['header'] = 'ON'
                    ConfigDict['region'] = 'UPD'
                    ConfigDict['order'] = -1
                    ConfigDict['page'] = ''
                    ConfigDict['name'] = ''
                    ConfigDict['find'] = ''
                    ConfigDict['struct'] = ''
                    ConfigDict['subreg'] = []
                    IsDefSect = False
                    IsUpdSect = True
                    IsVpdSect = False
                    Found = True
                else:
                    IsDefSect = False
                    IsUpdSect = False
                    IsVpdSect = False
            else:
                if IsDefSect or IsUpdSect or IsVpdSect:
                    if DscLine == "!else":
                        IfStack[-1] = not IfStack[-1]
                    elif DscLine == "!endif":
                        IfStack.pop()
                        Level = ElifStack.pop()
                        while Level > 0:
                            IfStack.pop()
                            Level = Level - 1
                    else:
                        Result = False
                        Match = re.match("!(ifdef|ifndef)\s+\$\((\w+)\)", DscLine)
                        if Match is not None:
                            if Match.group(2) in self._MacroDict:
                                if Match.group(1) == 'ifdef':
                                    Result = True
                            else:
                                if Match.group(1) == 'ifndef':
                                    Result = True
                            ElifStack.append(0)
                            IfStack.append(Result)
                        else:
                            Match = re.match("!(if|elseif)\s+\$\\((\w+)\)\s*==\s*(\w+|\$\(\w+\))", DscLine)
                            if Match is not None:
                                if Match.group(2) in self._MacroDict:
                                    MacroName = self._MacroDict[Match.group(2)]
                                else:
                                    MacroName = ''
                                Value = Match.group(3)
                                if Value.startswith('$'):
                                    if Value[2:-1] in self._MacroDict:
                                        Value = self._MacroDict[Value[2:-1]]
                                    else:
                                        Value = ''
                                if MacroName == Value:
                                    Result = True
                                if Match.group(1) == "if":
                                    ElifStack.append(0)
                                    IfStack.append(Result)
                                else:   #elseif
                                    IfStack[-1] = not IfStack[-1]
                                    IfStack.append(Result)
                                    ElifStack[-1] = ElifStack[-1] + 1
                            else:
                                if len(DscLine) > 0 and DscLine[0] == '!':
                                    #
                                    # Currently it can only handle build switches.
                                    # It does not support INF files in an included DSC.
                                    #
                                    continue
                                else:
                                    if reduce(lambda x, y: x and y, IfStack):
                                        Handle = True

            if not Handle:
                continue

            if IsDefSect:
                #DEFINE UPD_TOOL_GUID = 8C3D856A-9BE6-468E-850A-24F7A8D38E09
                Match = re.match("^\s*(?:DEFINE\s+)*(\w+)\s*=\s*([-\w]+)", DscLine)
                if Match:
                    self._MacroDict[Match.group(1)] = Match.group(2)
            else:
                Match = re.match("^\s*#\s+!(BSF|HDR)\s+(.+)", DscLine)
                if Match:
                    Remaining = Match.group(2)
                    if Match.group(1) == 'BSF':
                        Match = re.match("(?:^|.+\s+)PAGES:{(.+?)}", Remaining)
                        if Match:
                            # !BSF PAGES:{HSW:"Haswell System Agent", LPT:"Lynx Point PCH"}
                            PageList = Match.group(1).split(',')
                            for Page in PageList:
                                Page = Page.strip()
                                Match = re.match("(\w+):\"(.+)\"", Page)
                                self._CfgPageDict[Match.group(1)] = Match.group(2)

                        Match = re.match("(?:^|.+\s+)BLOCK:{NAME:\"(.+)\"\s*,\s*VER:\"(.+)\"\s*}", Remaining)
                        if Match:
                            self._CfgBlkDict['name'] = Match.group(1)
                            self._CfgBlkDict['ver'] = Match.group(2)

                        for Key in self._BsfKeyList:
                            Match = re.match("(?:^|.+\s+)%s:{(.+?)}" % Key, Remaining)
                            if Match:
                                if Key in ['HELP', 'OPTION'] and Match.group(1).startswith('+'):
                                    ConfigDict[Key.lower()] += Match.group(1)[1:]
                                else:
                                    ConfigDict[Key.lower()] = Match.group(1)
                    else:
                        for Key in self._HdrKeyList:
                            Match = re.match("(?:^|.+\s+)%s:{(.+?)}" % Key, Remaining)
                            if Match:
                                ConfigDict[Key.lower()] = Match.group(1)

                # Check VPD/UPD
                if IsUpdSect:
                    Match = re.match("^([_a-zA-Z0-9]+).([_a-zA-Z0-9]+)\s*\|\s*(0x[0-9A-F]{4})\s*\|\s*(\d+|0x[0-9a-fA-F]+)\s*\|\s*(.+)", DscLine)
                else:
                    Match = re.match("^([_a-zA-Z0-9]+).([_a-zA-Z0-9]+)\s*\|\s*(0x[0-9A-F]+)(?:\s*\|\s*(.+))?", DscLine)
                if Match:
                    ConfigDict['space'] = Match.group(1)
                    ConfigDict['cname'] = Match.group(2)
                    ConfigDict['offset'] = int (Match.group(3), 16)
                    if ConfigDict['order'] == -1:
                        ConfigDict['order'] = ConfigDict['offset'] << 8
                    else:
                        (Major, Minor) = ConfigDict['order'].split('.')
                        ConfigDict['order'] = (int (Major, 16) << 8 ) + int (Minor, 16)
                    if IsUpdSect:
                        Value = Match.group(5).strip()
                        if Match.group(4).startswith("0x"):
                            Length = int (Match.group(4), 16)
                        else:
                            Length = int (Match.group(4))
                    else:
                        Value = Match.group(4)
                        if Value is None:
                            Value = ''
                        Value = Value.strip()
                        if '|' in Value:
                            Match = re.match("^.+\s*\|\s*(.+)", Value)
                            if Match:
                                Value = Match.group(1)
                        Length = -1

                    ConfigDict['length'] = Length
                    Match = re.match("\$\((\w+)\)", Value)
                    if Match:
                        if Match.group(1) in self._MacroDict:
                            Value = self._MacroDict[Match.group(1)]
                    ConfigDict['value'] = Value
                    if ConfigDict['name'] == '':
                        # Clear BSF specific items
                        ConfigDict['help'] = ''
                        ConfigDict['type'] = ''
                        ConfigDict['option'] = ''

                    self._CfgItemList.append(ConfigDict.copy())
                    ConfigDict['name'] = ''
                    ConfigDict['find'] = ''
                    ConfigDict['struct'] = ''
                    ConfigDict['order'] = -1
                    ConfigDict['subreg'] = []
                else:
                    # It could be a virtual item as below
                    # !BSF FIELD:{SerialDebugPortAddress0:1}
                    Match = re.match("^\s*#\s+!BSF\s+FIELD:{(.+):(\d+)}", DscLine)
                    if Match:
                        SubCfgDict = ConfigDict
                        SubCfgDict['cname'] = Match.group(1)
                        SubCfgDict['length'] = int (Match.group(2))
                        if SubCfgDict['length'] > 0:
                            LastItem = self._CfgItemList[-1]
                            if len(LastItem['subreg']) == 0:
                                SubOffset = 0
                            else:
                                SubOffset += LastItem['subreg'][-1]['length']
                            SubCfgDict['offset'] = SubOffset
                            LastItem['subreg'].append (SubCfgDict.copy())
                        ConfigDict['name'] = ''
        return Error

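    # Sketch of the DSC item formats the parser above accepts (values are
    # illustrative only, derived from the regular expressions in ParseDscFile):
    #   UPD:  gPlatformFspPkgTokenSpaceGuid.PcdTest | 0x0020 | 0x04 | 0x12345678
    #   VPD:  gPlatformFspPkgTokenSpaceGuid.PcdTest | 0x0020 | 0x12345678
    # A UPD line yields space, cname, offset (0x20), length (4) and value; a VPD
    # line has no length column, so the length is stored as -1 and later resolved
    # from the VPD map file by UpdateVpdSizeField.
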
    def UpdateSubRegionDefaultValue (self):
        Error = 0
        for Item in self._CfgItemList:
            if len(Item['subreg']) == 0:
                continue
            bytearray = []
            if Item['value'][0] == '{':
                binlist = Item['value'][1:-1].split(',')
                for each in binlist:
                    each = each.strip()
                    if each.startswith('0x'):
                        value = int(each, 16)
                    else:
                        value = int(each)
                    bytearray.append(value)
            else:
                if Item['value'].startswith('0x'):
                    value = int(Item['value'], 16)
                else:
                    value = int(Item['value'])
                idx = 0
                while idx < Item['length']:
                    bytearray.append(value & 0xFF)
                    value = value >> 8
                    idx = idx + 1
            for SubItem in Item['subreg']:
                if SubItem['length'] in (1, 2, 4, 8):
                    valuelist = [b for b in bytearray[SubItem['offset']:SubItem['offset'] + SubItem['length']]]
                    valuelist.reverse()
                    valuestr = "".join('%02X' % b for b in valuelist)
                    SubItem['value'] = '0x%s' % valuestr
                else:
                    valuestr = ",".join('0x%02X' % b for b in bytearray[SubItem['offset']:SubItem['offset'] + SubItem['length']])
                    SubItem['value'] = '{%s}' % valuestr
        return Error

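    # Worked example for the routine above (hypothetical values): a 4-byte parent
    # item whose default is 0x11223344 is expanded little-endian into the byte
    # list [0x44, 0x33, 0x22, 0x11]; a 2-byte sub-region at offset 2 then gets
    # the default '0x1122', while an odd-sized sub-region would get a byte list
    # such as '{0x44,0x33,0x22}'.
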
    def UpdateVpdSizeField (self):
        FvDir = self._FvDir

        if 'VPD_TOOL_GUID' not in self._MacroDict:
            self.Error = "VPD_TOOL_GUID definition is missing in DSC file"
            return 1

        VpdMapFile = os.path.join(FvDir, self._MacroDict['VPD_TOOL_GUID'] + '.map')
        if not os.path.exists(VpdMapFile):
            self.Error = "VPD MAP file '%s' does not exist" % VpdMapFile
            return 2

        MapFd = open(VpdMapFile, "r")
        MapLines = MapFd.readlines()
        MapFd.close()

        VpdDict = {}
        PcdDict = {}
        for MapLine in MapLines:
            #gPlatformFspPkgTokenSpaceGuid.PcdVpdRegionSign | DEFAULT | 0x0000 | 8 | 0x534450565F425346
            #gPlatformFspPkgTokenSpaceGuid.PcdVpdRegionSign | 0x0000 | 8 | 0x534450565F425346
            #gPlatformFspPkgTokenSpaceGuid.PcdTest | 0x0008 | 5 | {0x01,0x02,0x03,0x04,0x05}
            Match = re.match("([_a-zA-Z0-9]+).([_a-zA-Z0-9]+)(\s\|\sDEFAULT)?\s\|\s(0x[0-9A-F]{4})\s\|\s(\d+|0x[0-9a-fA-F]+)\s\|\s(\{?[x0-9a-fA-F,\s]+\}?)", MapLine)
            if Match:
                Space = Match.group(1)
                Name = Match.group(2)
                if (self._MapVer == 0) and (Match.group(3) != None):
                    self._MapVer = 1
                Offset = int (Match.group(4), 16)
                if Match.group(5).startswith("0x"):
                    Length = int (Match.group(5), 16)
                else:
                    Length = int (Match.group(5))
                PcdDict["len"] = Length
                PcdDict["value"] = Match.group(6)
                VpdDict[Space + '.' + Name] = dict(PcdDict)

        for Item in self._CfgItemList:
            if Item['value'] == '':
                Item['value'] = VpdDict[Item['space'] + '.' + Item['cname']]['value']
            if Item['length'] == -1:
                Item['length'] = VpdDict[Item['space'] + '.' + Item['cname']]['len']
            if Item['struct'] != '':
                Type = Item['struct'].strip()
                if Type.endswith('*') and (Item['length'] != 4):
                    self.Error = "Struct pointer '%s' has invalid size" % Type
                    return 3

        return 0

    def CreateUpdTxtFile (self, UpdTxtFile):
        FvDir = self._FvDir
        if 'UPD_TOOL_GUID' not in self._MacroDict:
            self.Error = "UPD_TOOL_GUID definition is missing in DSC file"
            return 1

        if UpdTxtFile == '':
            UpdTxtFile = os.path.join(FvDir, self._MacroDict['UPD_TOOL_GUID'] + '.txt')

        ReCreate = False
        if not os.path.exists(UpdTxtFile):
            ReCreate = True
        else:
            DscTime = os.path.getmtime(self._DscFile)
            TxtTime = os.path.getmtime(UpdTxtFile)
            if DscTime > TxtTime:
                ReCreate = True

        if not ReCreate:
            # DSC has not been modified, so the other files
            # do not have to be re-generated
            self.Error = 'No DSC file change, skip to create UPD TXT file'
            return 256

        TxtFd = open(UpdTxtFile, "w")
        TxtFd.write("%s\n" % (__copyright_txt__ % date.today().year))

        NextOffset = 0
        SpaceIdx = 0
        if self._MapVer == 1:
            Default = 'DEFAULT|'
        else:
            Default = ''
        for Item in self._CfgItemList:
            if Item['region'] != 'UPD':
                continue
            Offset = Item['offset']
            if NextOffset < Offset:
                # insert one line
                TxtFd.write("%s.UnusedUpdSpace%d|%s0x%04X|0x%04X|{0}\n" % (Item['space'], SpaceIdx, Default, NextOffset, Offset - NextOffset))
                SpaceIdx = SpaceIdx + 1
            NextOffset = Offset + Item['length']
            TxtFd.write("%s.%s|%s0x%04X|%s|%s\n" % (Item['space'], Item['cname'], Default, Item['offset'], Item['length'], Item['value']))
        TxtFd.close()
        return 0

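    # Example of a line emitted into the UPD TXT file (illustrative values):
    #   gPlatformFspPkgTokenSpaceGuid.PcdTest|DEFAULT|0x0020|4|0x12345678
    # The 'DEFAULT|' column is only present when the VPD map used the newer
    # format (self._MapVer == 1); gaps between items are padded with
    # UnusedUpdSpace<N> entries so offsets stay contiguous.
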
    def CreateField (self, Name, Length, Offset, Struct):
        PosName = 28
        PosComment = 30

        IsArray = False
        if Length == 1:
            Type = "UINT8"
        elif Length == 2:
            Type = "UINT16"
        elif Length == 4:
            Type = "UINT32"
        elif Length == 8:
            Type = "UINT64"
        else:
            Type = "UINT8"
            IsArray = True

        if Struct != '':
            IsArray = False
            Type = Struct

        if IsArray:
            Name = Name + '[%d]' % Length

        if len(Type) < PosName:
            Space1 = PosName - len(Type)
        else:
            Space1 = 1

        if len(Name) < PosComment:
            Space2 = PosComment - len(Name)
        else:
            Space2 = 1

        return " %s%s%s;%s/* Offset 0x%04X */\n" % (Type, ' ' * Space1, Name, ' ' * Space2, Offset)

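    # Example return values for the helper above (column alignment shortened here):
    #   CreateField('Signature', 8, 0, '')  ->  "UINT64  Signature;  /* Offset 0x0000 */"
    #   CreateField('ImageId', 5, 8, '')    ->  "UINT8   ImageId[5]; /* Offset 0x0008 */"
    # A non-empty Struct argument overrides the UINTn type and suppresses the
    # array form.
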
    def CreateHeaderFile (self, InputHeaderFile, IsInternal):
        Error = 0
        FvDir = self._FvDir

        if IsInternal:
            HeaderFile = os.path.join(FvDir, 'VpdHeader.h')
        else:
            HeaderFile = os.path.join(FvDir, 'fsp_vpd.h')

        # Check if header needs to be recreated
        ReCreate = False
        if IsInternal:
            if not os.path.exists(HeaderFile):
                ReCreate = True
            else:
                DscTime = os.path.getmtime(self._DscFile)
                HeadTime = os.path.getmtime(HeaderFile)
                if not os.path.exists(InputHeaderFile):
                    InpTime = HeadTime
                else:
                    InpTime = os.path.getmtime(InputHeaderFile)
                if DscTime > HeadTime or InpTime > HeadTime:
                    ReCreate = True

            if not ReCreate:
                self.Error = "No DSC or input header file is changed, skip the header file generating"
                return 256

        HeaderFd = open(HeaderFile, "w")
        FileBase = os.path.basename(HeaderFile)
        FileName = FileBase.replace(".", "_").upper()
        HeaderFd.write("%s\n" % (__copyright_h__ % date.today().year))
        HeaderFd.write("#ifndef __%s__\n" % FileName)
        HeaderFd.write("#define __%s__\n\n" % FileName)
        HeaderFd.write("#pragma pack(1)\n\n")

        if InputHeaderFile != '':
            if not os.path.exists(InputHeaderFile):
                self.Error = "Input header file '%s' does not exist" % InputHeaderFile
                return 2

            InFd = open(InputHeaderFile, "r")
            IncLines = InFd.readlines()
            InFd.close()

            Export = False
            for Line in IncLines:
                Match = re.search ("!EXPORT\s+EXTERNAL_BOOTLOADER_STRUCT_(BEGIN|END)\s+", Line)
                if Match:
                    if Match.group(1) == "BEGIN":
                        Export = True
                        continue
                    else:
                        Export = False
                        continue
                if Export:
                    HeaderFd.write(Line)
            HeaderFd.write("\n\n")

        for Region in ['UPD', 'VPD']:

            # Write PcdVpdRegionSign and PcdImageRevision
            if Region[0] == 'V':
                if 'VPD_TOOL_GUID' not in self._MacroDict:
                    self.Error = "VPD_TOOL_GUID definition is missing in DSC file"
                    Error = 1
                    break

                BinFile = os.path.join(FvDir, self._MacroDict['VPD_TOOL_GUID'] + ".bin")
                if not os.path.exists(BinFile):
                    self.Error = "VPD binary file '%s' does not exist" % BinFile
                    Error = 2
                    break

                BinFd = open(BinFile, "rb")
                IdStr = BinFd.read(0x08)
                ImageId = struct.unpack('<Q', IdStr)
                ImageRev = struct.unpack('<I', BinFd.read(0x04))
                BinFd.close()

                HeaderFd.write("#define FSP_IMAGE_ID 0x%016X /* '%s' */\n" % (ImageId[0], IdStr))
                HeaderFd.write("#define FSP_IMAGE_REV 0x%08X \n\n" % ImageRev[0])

            HeaderFd.write("typedef struct _" + Region[0] + "PD_DATA_REGION {\n")
            NextOffset = 0
            SpaceIdx = 0
            Offset = 0

            LastVisible = True
            ResvOffset = 0
            ResvIdx = 0
            LineBuffer = []
            for Item in self._CfgItemList:
                if Item['region'] != Region:
                    continue

                NextVisible = LastVisible
                if not IsInternal:
                    if LastVisible and (Item['header'] == 'OFF'):
                        NextVisible = False
                        ResvOffset = Item['offset']
                    elif (not LastVisible) and Item['header'] == 'ON':
                        NextVisible = True
                        Name = "Reserved" + Region[0] + "pdSpace%d" % ResvIdx
                        ResvIdx = ResvIdx + 1
                        HeaderFd.write(self.CreateField (Name, Item["offset"] - ResvOffset, ResvOffset, ''))

                if Offset < Item["offset"]:
                    if IsInternal or LastVisible:
                        Name = "Unused" + Region[0] + "pdSpace%d" % SpaceIdx
                        LineBuffer.append(self.CreateField (Name, Item["offset"] - Offset, Offset, ''))
                        SpaceIdx = SpaceIdx + 1
                    Offset = Item["offset"]

                if Offset != Item["offset"]:
                    print "Unsorted offset 0x%04X\n" % Item["offset"]
                    Error = 2
                    break

                LastVisible = NextVisible

                Offset = Offset + Item["length"]
                if IsInternal or LastVisible:
                    for Each in LineBuffer:
                        HeaderFd.write (Each)
                    LineBuffer = []
                    HeaderFd.write(self.CreateField (Item["cname"], Item["length"], Item["offset"], Item['struct']))

            HeaderFd.write("} " + Region[0] + "PD_DATA_REGION;\n\n")
        HeaderFd.write("#pragma pack()\n\n")
        HeaderFd.write("#endif\n")
        HeaderFd.close()

        return Error

    def WriteBsfStruct (self, BsfFd, Item):
        if Item['type'] == "None":
            Space = "gPlatformFspPkgTokenSpaceGuid"
        else:
            Space = Item['space']
        Line = " $%s_%s" % (Space, Item['cname'])
        Match = re.match("\s*\{([x0-9a-fA-F,\s]+)\}\s*", Item['value'])
        if Match:
            DefaultValue = Match.group(1).strip()
        else:
            DefaultValue = Item['value'].strip()
        BsfFd.write(" %s%s%4d bytes $_DEFAULT_ = %s\n" % (Line, ' ' * (64 - len(Line)), Item['length'], DefaultValue))
        TmpList = []
        if Item['type'] == "Combo":
            if not Item['option'] in self._BuidinOption:
                OptList = Item['option'].split(',')
                for Option in OptList:
                    Option = Option.strip()
                    (OpVal, OpStr) = Option.split(':')
                    TmpList.append((OpVal, OpStr))
        return TmpList

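    # Example BSF StructDef line written above (illustrative, alignment trimmed):
    #   $gPlatformFspPkgTokenSpaceGuid_PcdTest    4 bytes $_DEFAULT_ = 0x12345678
    # For a 'Combo' item with a custom option string such as '0:IDE, 1:AHCI',
    # the returned TmpList would be [('0', 'IDE'), ('1', 'AHCI')].
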
    def WriteBsfOption (self, BsfFd, Item):
        PcdName = Item['space'] + '_' + Item['cname']
        WriteHelp = 0
        if Item['type'] == "Combo":
            if Item['option'] in self._BuidinOption:
                Options = self._BuidinOption[Item['option']]
            else:
                Options = PcdName
            BsfFd.write(' %s $%s, "%s", &%s,\n' % (Item['type'], PcdName, Item['name'], Options))
            WriteHelp = 1
        elif Item['type'].startswith("EditNum"):
            Match = re.match("EditNum\s*,\s*(HEX|DEC)\s*,\s*\((\d+|0x[0-9A-Fa-f]+)\s*,\s*(\d+|0x[0-9A-Fa-f]+)\)", Item['type'])
            if Match:
                BsfFd.write(' EditNum $%s, "%s", %s,\n' % (PcdName, Item['name'], Match.group(1)))
                WriteHelp = 2
        elif Item['type'].startswith("EditText"):
            BsfFd.write(' %s $%s, "%s",\n' % (Item['type'], PcdName, Item['name']))
            WriteHelp = 1

        if WriteHelp > 0:
            HelpLines = Item['help'].split('\\n\\r')
            FirstLine = True
            for HelpLine in HelpLines:
                if FirstLine:
                    FirstLine = False
                    BsfFd.write(' Help "%s"\n' % (HelpLine))
                else:
                    BsfFd.write(' "%s"\n' % (HelpLine))
            if WriteHelp == 2:
                BsfFd.write(' "Valid range: %s ~ %s"\n' % (Match.group(2), Match.group(3)))

    def GenerateBsfFile (self, BsfFile):

        if BsfFile == '':
            self.Error = "BSF output file '%s' is invalid" % BsfFile
            return 1

        Error = 0
        OptionDict = {}
        BsfFd = open(BsfFile, "w")
        BsfFd.write("%s\n" % (__copyright_bsf__ % date.today().year))
        BsfFd.write("%s\n" % self._GlobalDataDef)
        BsfFd.write("StructDef\n")
        NextOffset = -1
        for Item in self._CfgItemList:
            if Item['find'] != '':
                BsfFd.write('\n Find "%s"\n' % Item['find'])
                NextOffset = Item['offset'] + Item['length']
            if Item['name'] != '':
                if NextOffset != Item['offset']:
                    BsfFd.write(" Skip %d bytes\n" % (Item['offset'] - NextOffset))
                if len(Item['subreg']) > 0:
                    NextOffset = Item['offset']
                    for SubItem in Item['subreg']:
                        NextOffset += SubItem['length']
                        if SubItem['name'] == '':
                            BsfFd.write(" Skip %d bytes\n" % (SubItem['length']))
                        else:
                            Options = self.WriteBsfStruct(BsfFd, SubItem)
                            if len(Options) > 0:
                                OptionDict[SubItem['space'] + '_' + SubItem['cname']] = Options
                    if (Item['offset'] + Item['length']) < NextOffset:
                        self.Error = "BSF sub region '%s' length does not match" % (Item['space'] + '.' + Item['cname'])
                        return 2
                else:
                    NextOffset = Item['offset'] + Item['length']
                    Options = self.WriteBsfStruct(BsfFd, Item)
                    if len(Options) > 0:
                        OptionDict[Item['space'] + '_' + Item['cname']] = Options
        BsfFd.write("\nEndStruct\n\n")

        BsfFd.write("%s" % self._BuidinOptionTxt)

        for Each in OptionDict:
            BsfFd.write("List &%s\n" % Each)
            for Item in OptionDict[Each]:
                BsfFd.write(' Selection %s , "%s"\n' % (Item[0], Item[1]))
            BsfFd.write("EndList\n\n")

        BsfFd.write("BeginInfoBlock\n")
        BsfFd.write(' PPVer "%s"\n' % (self._CfgBlkDict['ver']))
        BsfFd.write(' Description "%s"\n' % (self._CfgBlkDict['name']))
        BsfFd.write("EndInfoBlock\n\n")

        for Each in self._CfgPageDict:
            BsfFd.write('Page "%s"\n' % self._CfgPageDict[Each])
            BsfItems = []
            for Item in self._CfgItemList:
                if Item['name'] != '':
                    if Item['page'] != Each:
                        continue
                    if len(Item['subreg']) > 0:
                        for SubItem in Item['subreg']:
                            if SubItem['name'] != '':
                                BsfItems.append(SubItem)
                    else:
                        BsfItems.append(Item)

            BsfItems.sort(key=lambda x: x['order'])

            for Item in BsfItems:
                self.WriteBsfOption (BsfFd, Item)
            BsfFd.write("EndPage\n\n")

        BsfFd.close()
        return Error

def Usage():
    print "GenCfgOpt Version 0.50"
    print "Usage:"
    print " GenCfgOpt UPDTXT PlatformDscFile BuildFvDir [TxtOutFile] [-D Macros]"
    print " GenCfgOpt HEADER PlatformDscFile BuildFvDir [InputHFile] [-D Macros]"
    print " GenCfgOpt GENBSF PlatformDscFile BuildFvDir BsfOutFile [-D Macros]"

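# Example invocation matching the usage text above (paths are illustrative):
#   python GenCfgOpt.py UPDTXT PlatformPkg.dsc Build/FV -D CFG_DEBUG=1
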
def Main():
    #
    # Parse the options and args
    #
    GenCfgOpt = CGenCfgOpt()
    argc = len(sys.argv)
    if argc < 4:
        Usage()
        return 1
    else:
        DscFile = sys.argv[2]
        if not os.path.exists(DscFile):
            print "ERROR: Cannot open DSC file '%s' !" % DscFile
            return 2

        OutFile = ''
        if argc > 4:
            if sys.argv[4][0] == '-':
                Start = 4
            else:
                OutFile = sys.argv[4]
                Start = 5
            if GenCfgOpt.ParseMacros(sys.argv[Start:]) != 0:
                print "ERROR: %s !" % GenCfgOpt.Error
                return 3

        FvDir = sys.argv[3]
        if not os.path.isdir(FvDir):
            print "ERROR: FV folder '%s' is invalid !" % FvDir
            return 4

        if GenCfgOpt.ParseDscFile(DscFile, FvDir) != 0:
            print "ERROR: %s !" % GenCfgOpt.Error
            return 5

        if GenCfgOpt.UpdateVpdSizeField() != 0:
            print "ERROR: %s !" % GenCfgOpt.Error
            return 6

        if GenCfgOpt.UpdateSubRegionDefaultValue() != 0:
            print "ERROR: %s !" % GenCfgOpt.Error
            return 7

        if sys.argv[1] == "UPDTXT":
            Ret = GenCfgOpt.CreateUpdTxtFile(OutFile)
            if Ret != 0:
                # No change is detected
                if Ret == 256:
                    print "INFO: %s !" % (GenCfgOpt.Error)
                else:
                    print "ERROR: %s !" % (GenCfgOpt.Error)
            return Ret
        elif sys.argv[1] == "HEADER":
            Ret = GenCfgOpt.CreateHeaderFile(OutFile, True)
            if Ret != 0:
                # No change is detected
                if Ret == 256:
                    print "INFO: %s !" % (GenCfgOpt.Error)
                else:
                    print "ERROR: %s !" % (GenCfgOpt.Error)
                return Ret
            if GenCfgOpt.CreateHeaderFile(OutFile, False) != 0:
                print "ERROR: %s !" % GenCfgOpt.Error
                return 8
        elif sys.argv[1] == "GENBSF":
            if GenCfgOpt.GenerateBsfFile(OutFile) != 0:
                print "ERROR: %s !" % GenCfgOpt.Error
                return 9
        else:
            if argc < 5:
                Usage()
                return 1
            print "ERROR: Unknown command '%s' !" % sys.argv[1]
            Usage()
            return 1
        return 0
    return 0


if __name__ == '__main__':
    sys.exit(Main())