BaseTools: Clean up source files
1. Do not use tab characters
2. No trailing white space in one line
3. All files must end with CRLF

Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Liming Gao <liming.gao@intel.com>
Cc: Yonghong Zhu <yonghong.zhu@intel.com>
Reviewed-by: Yonghong Zhu <yonghong.zhu@intel.com>
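The three rules above are mechanical, so they can be checked with a short script. Below is a minimal, illustrative Python sketch (not part of this commit or of the BaseTools sources; the helper name check_file is made up for the example) that reports tab characters, trailing white space, and a file that does not end with CRLF.

import sys

def check_file(path):
    # Illustrative checker for the three clean-up rules; not a BaseTools tool.
    problems = []
    with open(path, 'rb') as source:
        data = source.read()
    for number, line in enumerate(data.splitlines(True), start=1):
        body = line.rstrip(b'\r\n')
        if b'\t' in body:
            problems.append('%s:%d: tab character' % (path, number))
        if body != body.rstrip():
            problems.append('%s:%d: trailing white space' % (path, number))
    if data and not data.endswith(b'\r\n'):
        problems.append('%s: file does not end with CRLF' % path)
    return problems

if __name__ == '__main__':
    for name in sys.argv[1:]:
        for problem in check_file(name):
            print(problem)

Running such a check over the touched sources before and after the change would be one way to confirm the cleanup.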
@@ -2,15 +2,15 @@
#
# This file is for build version number auto generation
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#

gBUILD_VERSION = ""
gBUILD_VERSION = "Developer Build based on Revision: Unknown"

@@ -1,7 +1,7 @@
## @file
# This file is used to create a database used by ECC tool
#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -33,7 +33,7 @@ from Table.TableDsc import TableDsc
# This class defined the build databse
# During the phase of initialization, the database will create all tables and
# insert all records of table DataModel
#
#
# @param object: Inherited from object class
# @param DbPath: A string for the path of the ECC database
#
@@ -54,7 +54,7 @@ class Database(object):
self.TblInf = TableInf(self.Cur)
self.TblDec = TableDec(self.Cur)
self.TblDsc = TableDsc(self.Cur)

## Initialize build database
#
# 1. Delete all old existing tables
@@ -69,7 +69,7 @@ class Database(object):
# self.TblDataModel.Drop()
# self.TblDsc.Drop()
# self.TblFile.Drop()

#
# Create new tables
#
@@ -78,7 +78,7 @@ class Database(object):
self.TblInf.Create()
self.TblDec.Create()
self.TblDsc.Create()

#
# Initialize table DataModel
#
@@ -91,10 +91,10 @@ class Database(object):
#
def QueryTable(self, Table):
Table.Query()

## Close entire database
#
# Commit all first
# Commit all first
# Close the connection and cursor
#
def Close(self):
@@ -110,11 +110,11 @@ class Database(object):
if __name__ == '__main__':
EdkLogger.Initialize()
EdkLogger.SetLevel(EdkLogger.DEBUG_0)

Db = Database(DATABASE_PATH)
Db.InitDatabase()
Db.QueryTable(Db.TblDataModel)
Db.QueryTable(Db.TblDataModel)
Db.QueryTable(Db.TblFile)
Db.QueryTable(Db.TblDsc)
Db.Close()

@@ -63,11 +63,11 @@ gDependencyDatabase = {} # arch : {file path : [dependent files list]}
_TempInfs = []

def GetVariableOffset(mapfilepath, efifilepath, varnames):
""" Parse map file to get variable offset in current EFI file
""" Parse map file to get variable offset in current EFI file
@param mapfilepath Map file absolution path
@param efifilepath: EFI binary file full path
@param varnames iteratable container whose elements are variable names to be searched

@return List whos elements are tuple with variable name and raw offset
"""
lines = []
@@ -77,7 +77,7 @@ def GetVariableOffset(mapfilepath, efifilepath, varnames):
f.close()
except:
return None

if len(lines) == 0: return None
firstline = lines[0].strip()
if (firstline.startswith("Archive member included ") and
@@ -177,7 +177,7 @@ def _parseGeneral(lines, efifilepath, varnames):
continue
if line.startswith("entry point at"):
status = 3
continue
continue
if status == 1 and len(line) != 0:
m = secReGeneral.match(line)
assert m is not None, "Fail to parse the section in map file , line is %s" % line
@@ -257,7 +257,7 @@ def ProcessDuplicatedInf(Path, BaseName, Workspace):
#
# A temporary INF is copied to database path which must have write permission
# The temporary will be removed at the end of build
# In case of name conflict, the file name is
# In case of name conflict, the file name is
# FILE_GUIDBaseName (0D1B936F-68F3-4589-AFCC-FB8B7AEBC836module.inf)
#
TempFullPath = os.path.join(DbDir,
@@ -268,7 +268,7 @@ def ProcessDuplicatedInf(Path, BaseName, Workspace):
#
# To build same module more than once, the module path with FILE_GUID overridden has
# the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
# in DSC which is used as relative path by C files and other files in INF.
# in DSC which is used as relative path by C files and other files in INF.
# A trick was used: all module paths are PathClass instances, after the initialization
# of PathClass, the PathClass.Path is overridden by the temporary INF path.
#
@@ -287,7 +287,7 @@ def ProcessDuplicatedInf(Path, BaseName, Workspace):
# If file exists, compare contents
#
if os.path.exists(TempFullPath):
with open(str(Path), 'rb') as f1, open(TempFullPath, 'rb') as f2:
with open(str(Path), 'rb') as f1, open(TempFullPath, 'rb') as f2:
if f1.read() == f2.read():
return RtPath
_TempInfs.append(TempFullPath)
@@ -1545,29 +1545,29 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''):
# Used to avoid split issue while the value string contain "|" character
#
# @param[in] Setting: A String contain value/datum type/token number information;
#
# @retval ValueList: A List contain value, datum type and toke number.
#
# @retval ValueList: A List contain value, datum type and toke number.
#
def AnalyzePcdData(Setting):
ValueList = ['', '', '']

ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
PtrValue = ValueRe.findall(Setting)

ValueUpdateFlag = False

if len(PtrValue) >= 1:
Setting = re.sub(ValueRe, '', Setting)
ValueUpdateFlag = True

TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList

if ValueUpdateFlag:
ValueList[0] = PtrValue[0]

return ValueList

return ValueList

## check format of PCD value against its the datum type
#
# For PCD value setting
@@ -1770,7 +1770,7 @@ class PathClass(object):
OtherKey = Other.Path
else:
OtherKey = str(Other)

SelfKey = self.Path
if SelfKey == OtherKey:
return 0
@@ -1908,7 +1908,7 @@ class PeImageClass():
def _ByteListToStr(self, ByteList):
String = ''
for index in range(len(ByteList)):
if ByteList[index] == 0:
if ByteList[index] == 0:
break
String += chr(ByteList[index])
return String
@@ -1945,11 +1945,11 @@ class DefaultStore():
if sid == minid:
return name
class SkuClass():

DEFAULT = 0
SINGLE = 1
MULTIPLE =2

def __init__(self,SkuIdentifier='', SkuIds=None):
if SkuIds is None:
SkuIds = {}
@@ -1961,7 +1961,7 @@ class SkuClass():
EdkLogger.error("build", PARAMETER_INVALID,
ExtraData = "SKU-ID [%s] value %s exceeds the max value of UINT64"
% (SkuName, SkuId))

self.AvailableSkuIds = sdict()
self.SkuIdSet = []
self.SkuIdNumberSet = []
@@ -1975,10 +1975,10 @@ class SkuClass():
self.SkuIdSet = SkuIds.keys()
self.SkuIdNumberSet = [num[0].strip() + 'U' for num in SkuIds.values()]
else:
r = SkuIdentifier.split('|')
r = SkuIdentifier.split('|')
self.SkuIdSet=[(r[k].strip()).upper() for k in range(len(r))]
k = None
try:
try:
self.SkuIdNumberSet = [SkuIds[k][0].strip() + 'U' for k in self.SkuIdSet]
except Exception:
EdkLogger.error("build", PARAMETER_INVALID,
@@ -2027,7 +2027,7 @@ class SkuClass():
skuorderset = []
for skuname in self.SkuIdSet:
skuorderset.append(self.GetSkuChain(skuname))

skuorder = []
for index in range(max(len(item) for item in skuorderset)):
for subset in skuorderset:
@@ -2039,8 +2039,8 @@ class SkuClass():

return skuorder

def __SkuUsageType(self):

def __SkuUsageType(self):

if self.__SkuIdentifier.upper() == "ALL":
return SkuClass.MULTIPLE

@@ -2073,7 +2073,7 @@ class SkuClass():
return ArrayStr
def __GetAvailableSkuIds(self):
return self.AvailableSkuIds

def __GetSystemSkuID(self):
if self.__SkuUsageType() == SkuClass.SINGLE:
if len(self.SkuIdSet) == 1:

@@ -4,7 +4,7 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2015 - 2016, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -20,16 +20,16 @@ from Common.DataType import TAB_WORKSPACE
## MultipleWorkspace
#
# This class manage multiple workspace behavior
#
#
# @param class:
#
# @var WORKSPACE: defined the current WORKSPACE
# @var PACKAGES_PATH: defined the other WORKSAPCE, if current WORKSPACE is invalid, search valid WORKSPACE from PACKAGES_PATH
#
#
class MultipleWorkspace(object):
WORKSPACE = ''
PACKAGES_PATH = None

## convertPackagePath()
#
# Convert path to match workspace.
@@ -59,7 +59,7 @@ class MultipleWorkspace(object):
cls.PACKAGES_PATH = [cls.convertPackagePath (Ws, os.path.normpath(Path.strip())) for Path in PackagesPath.split(os.pathsep)]
else:
cls.PACKAGES_PATH = []

## join()
#
# rewrite os.path.join function
@@ -79,7 +79,7 @@ class MultipleWorkspace(object):
return Path
Path = os.path.join(Ws, *p)
return Path

## relpath()
#
# rewrite os.path.relpath function
@@ -98,7 +98,7 @@ class MultipleWorkspace(object):
if Path.lower().startswith(Ws.lower()):
Path = os.path.relpath(Path, Ws)
return Path

## getWs()
#
# get valid workspace for the path
@@ -117,7 +117,7 @@ class MultipleWorkspace(object):
if os.path.exists(absPath):
return Pkg
return Ws

## handleWsMacro()
#
# handle the $(WORKSPACE) tag, if current workspace is invalid path relative the tool, replace it.
@@ -143,7 +143,7 @@ class MultipleWorkspace(object):
PathList[i] = str[0:MacroStartPos] + Path
PathStr = ' '.join(PathList)
return PathStr

## getPkgPath()
#
# get all package pathes.
@@ -153,4 +153,4 @@ class MultipleWorkspace(object):
@classmethod
def getPkgPath(cls):
return cls.PACKAGES_PATH

@@ -43,7 +43,7 @@ ERR_IN_OPERAND = 'Macro after IN operator can only be: $(FAMILY), $(ARCH), $(TOO

class RangeObject(object):
def __init__(self, start, end, empty = False):

if int(start) < int(end):
self.start = int(start)
self.end = int(end)
@@ -55,24 +55,24 @@ class RangeObject(object):
class RangeContainer(object):
def __init__(self):
self.rangelist = []

def push(self, RangeObject):
self.rangelist.append(RangeObject)
self.rangelist = sorted(self.rangelist, key = lambda rangeobj : rangeobj.start)
self.merge()

def pop(self):
for item in self.rangelist:
yield item

def __clean__(self):

def __clean__(self):
newrangelist = []
for rangeobj in self.rangelist:
if rangeobj.empty == True:
continue
else:
newrangelist.append(rangeobj)
self.rangelist = newrangelist
self.rangelist = newrangelist
def merge(self):
self.__clean__()
for i in range(0, len(self.rangelist) - 1):
@@ -80,23 +80,23 @@ class RangeContainer(object):
continue
else:
self.rangelist[i + 1].start = self.rangelist[i].start
self.rangelist[i + 1].end = self.rangelist[i + 1].end > self.rangelist[i].end and self.rangelist[i + 1].end or self.rangelist[i].end
self.rangelist[i + 1].end = self.rangelist[i + 1].end > self.rangelist[i].end and self.rangelist[i + 1].end or self.rangelist[i].end
self.rangelist[i].empty = True

self.__clean__()

def dump(self):
print("----------------------")
rangelist = ""
for object in self.rangelist:
rangelist = rangelist + "[%d , %d]" % (object.start, object.end)
print(rangelist)

class XOROperatorObject(object):
def __init__(self):

class XOROperatorObject(object):
def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "XOR ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -108,9 +108,9 @@ class XOROperatorObject(object):
return rangeId

class LEOperatorObject(object):
def __init__(self):
def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "LE ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -120,22 +120,22 @@ class LEOperatorObject(object):
SymbolTable[rangeId1] = rangeContainer
return rangeId1
class LTOperatorObject(object):
def __init__(self):
def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "LT ..."
raise BadExpression(ERR_SNYTAX % Expr)
Expr = "LT ..."
raise BadExpression(ERR_SNYTAX % Expr)
rangeId1 = str(uuid.uuid1())
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(0, int(Operand) - 1))
SymbolTable[rangeId1] = rangeContainer
return rangeId1
return rangeId1

class GEOperatorObject(object):
def __init__(self):
def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "GE ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -143,12 +143,12 @@ class GEOperatorObject(object):
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(int(Operand), MAX_VAL_TYPE[DataType]))
SymbolTable[rangeId1] = rangeContainer
return rangeId1

return rangeId1

class GTOperatorObject(object):
def __init__(self):
def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "GT ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -156,12 +156,12 @@ class GTOperatorObject(object):
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(int(Operand) + 1, MAX_VAL_TYPE[DataType]))
SymbolTable[rangeId1] = rangeContainer
return rangeId1

return rangeId1

class EQOperatorObject(object):
def __init__(self):
def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "EQ ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -169,8 +169,8 @@ class EQOperatorObject(object):
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(int(Operand), int(Operand)))
SymbolTable[rangeId1] = rangeContainer
return rangeId1

return rangeId1

def GetOperatorObject(Operator):
if Operator == '>':
return GTOperatorObject()
@@ -214,8 +214,8 @@ class RangeExpression(BaseExpression):
NumberDict[HexNumber] = Number
for HexNum in NumberDict:
expr = expr.replace(HexNum, NumberDict[HexNum])

rangedict = {}

rangedict = {}
for validrange in self.RangePattern.findall(expr):
start, end = validrange.split(" - ")
start = start.strip()
@@ -225,19 +225,19 @@ class RangeExpression(BaseExpression):
rangeContainer.push(RangeObject(start, end))
self.operanddict[str(rangeid)] = rangeContainer
rangedict[validrange] = str(rangeid)

for validrange in rangedict:
expr = expr.replace(validrange, rangedict[validrange])

self._Expr = expr

self._Expr = expr
return expr

def EvalRange(self, Operator, Oprand):

operatorobj = GetOperatorObject(Operator)
return operatorobj.Calculate(Oprand, self.PcdDataType, self.operanddict)

def Rangeintersection(self, Oprand1, Oprand2):
rangeContainer1 = self.operanddict[Oprand1]
rangeContainer2 = self.operanddict[Oprand2]
@@ -266,35 +266,35 @@ class RangeExpression(BaseExpression):
elif end1 >= end2:
rangeid = str(uuid.uuid1())
rangeContainer.push(RangeObject(start2, end2))

self.operanddict[rangeid] = rangeContainer
# rangeContainer.dump()
return rangeid

def Rangecollections(self, Oprand1, Oprand2):

rangeContainer1 = self.operanddict[Oprand1]
rangeContainer2 = self.operanddict[Oprand2]
rangeContainer = RangeContainer()

for rangeobj in rangeContainer2.pop():
rangeContainer.push(rangeobj)
for rangeobj in rangeContainer1.pop():
rangeContainer.push(rangeobj)

rangeid = str(uuid.uuid1())
self.operanddict[rangeid] = rangeContainer

# rangeContainer.dump()
return rangeid

def NegtiveRange(self, Oprand1):
rangeContainer1 = self.operanddict[Oprand1]

rangeids = []

for rangeobj in rangeContainer1.pop():
rangeContainer = RangeContainer()
rangeid = str(uuid.uuid1())
@@ -321,13 +321,13 @@ class RangeExpression(BaseExpression):
re = self.Rangeintersection(rangeids[0], rangeids[1])
for i in range(2, len(rangeids)):
re = self.Rangeintersection(re, rangeids[i])

rangeid2 = str(uuid.uuid1())
self.operanddict[rangeid2] = self.operanddict[re]
return rangeid2

def Eval(self, Operator, Oprand1, Oprand2 = None):

if Operator in ["!", "NOT", "not"]:
if not gGuidPattern.match(Oprand1.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
@@ -338,7 +338,7 @@ class RangeExpression(BaseExpression):
elif Operator == 'and' :
if not gGuidPatternEnd.match(Oprand1.strip()) or not gGuidPatternEnd.match(Oprand2.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
return self.Rangeintersection(Oprand1, Oprand2)
return self.Rangeintersection(Oprand1, Oprand2)
elif Operator == 'or':
if not gGuidPatternEnd.match(Oprand1.strip()) or not gGuidPatternEnd.match(Oprand2.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
@@ -369,11 +369,11 @@ class RangeExpression(BaseExpression):
self._Len = len(self._Expr)
self._Token = ''
self._WarnExcept = None

# Literal token without any conversion
self._LiteralToken = ''

# store the operand object
self.operanddict = {}
# The Pcd max value depends on PcdDataType
@@ -393,9 +393,9 @@ class RangeExpression(BaseExpression):
self._Depth = Depth

self._Expr = self._Expr.strip()

self.preProcessRangeExpr(self._Expr)

# check if the expression does not need to evaluate
if RealValue and Depth == 0:
self._Token = self._Expr
@@ -407,12 +407,12 @@ class RangeExpression(BaseExpression):

Val = self._OrExpr()
RealVal = Val

RangeIdList = RealVal.split("or")
RangeList = []
for rangeid in RangeIdList:
RangeList.append(self.operanddict[rangeid.strip()])

return RangeList

# Template function to parse binary operators which have same precedence

@@ -839,7 +839,7 @@ def StringToArray(String):
return "{%s,0x00}" % ",".join(C.strip() for C in String[1:-1].split(','))
else:
return "{%s}" % ",".join(C.strip() for C in String[1:-1].split(','))

else:
if len(String.split()) % 2:
return '{%s,0}' % ','.join(String.split())

@@ -1,9 +1,9 @@
# # @file
#
#
# This file is used to handle the variable attributes and property information
#
#
# Copyright (c) 2015, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -12,7 +12,7 @@
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#

class VariableAttributes(object):
EFI_VARIABLE_NON_VOLATILE = 0x00000001
EFI_VARIABLE_BOOTSERVICE_ACCESS = 0x00000002
@@ -24,22 +24,22 @@ class VariableAttributes(object):
"RT":EFI_VARIABLE_RUNTIME_ACCESS,
"RO":VAR_CHECK_VARIABLE_PROPERTY_READ_ONLY
}

def __init__(self):
pass

@staticmethod
def GetVarAttributes(var_attr_str):
VarAttr = 0x00000000
VarProp = 0x00000000

attr_list = var_attr_str.split(",")
for attr in attr_list:
attr = attr.strip()
if attr == 'RO':
VarProp = VariableAttributes.VAR_CHECK_VARIABLE_PROPERTY_READ_ONLY
else:
VarAttr = VarAttr | VariableAttributes.VarAttributesMap.get(attr, 0x00000000)
VarAttr = VarAttr | VariableAttributes.VarAttributesMap.get(attr, 0x00000000)
return VarAttr, VarProp
@staticmethod
def ValidateVarAttributes(var_attr_str):

@@ -1,9 +1,9 @@
## @file
#
#
# This package manage the VPD PCD information file which will be generated
# by build tool's autogen.
# The VPD PCD information file will be input for third-party BPDG tool which
# is pointed by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt
# is pointed by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt
#
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
@@ -33,8 +33,8 @@ FILE_COMMENT_TEMPLATE = \
# THIS IS AUTO-GENERATED FILE BY BUILD TOOLS AND PLEASE DO NOT MAKE MODIFICATION.
#
# This file lists all VPD informations for a platform collected by build.exe.
#
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -70,17 +70,17 @@ FILE_COMMENT_TEMPLATE = \
#
class VpdInfoFile:

_rVpdPcdLine = None
_rVpdPcdLine = None
## Constructor
def __init__(self):
## Dictionary for VPD in following format
#
# Key : PcdClassObject instance.
# Key : PcdClassObject instance.
# @see BuildClassObject.PcdClassObject
# Value : offset in different SKU such as [sku1_offset, sku2_offset]
self._VpdArray = {}
self._VpdInfo = {}

## Add a VPD PCD collected from platform's autogen when building.
#
# @param vpds The list of VPD PCD collected for a platform.
@@ -91,40 +91,40 @@ class VpdInfoFile:
def Add(self, Vpd, skuname, Offset):
if (Vpd is None):
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")

if not (Offset >= 0 or Offset == "*"):
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)

if Vpd.DatumType == TAB_VOID:
if Vpd.MaxDatumSize <= 0:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
if not Vpd.MaxDatumSize:
Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType]
else:
if Vpd.MaxDatumSize <= 0:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))

if Vpd not in self._VpdArray:
#
# If there is no Vpd instance in dict, that imply this offset for a given SKU is a new one
# If there is no Vpd instance in dict, that imply this offset for a given SKU is a new one
#
self._VpdArray[Vpd] = {}

self._VpdArray[Vpd].update({skuname:Offset})

## Generate VPD PCD information into a text file
#
#
# If parameter FilePath is invalid, then assert.
# If
# If
# @param FilePath The given file path which would hold VPD information
def Write(self, FilePath):
if not (FilePath is not None or len(FilePath) != 0):
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid parameter FilePath: %s." % FilePath)
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid parameter FilePath: %s." % FilePath)

Content = FILE_COMMENT_TEMPLATE
Pcds = sorted(self._VpdArray.keys())
@@ -155,15 +155,15 @@ class VpdInfoFile:
try:
fd = open(FilePath, "r")
except:
EdkLogger.error("VpdInfoFile",
BuildToolError.FILE_OPEN_FAILURE,
EdkLogger.error("VpdInfoFile",
BuildToolError.FILE_OPEN_FAILURE,
"Fail to open file %s for written." % FilePath)
Lines = fd.readlines()
for Line in Lines:
Line = Line.strip()
if len(Line) == 0 or Line.startswith("#"):
continue

#
# the line must follow output format defined in BPDG spec.
#
@@ -173,9 +173,9 @@ class VpdInfoFile:
TokenSpaceName, PcdTokenName = PcdName.split(".")
except:
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath)

Found = False

if (TokenSpaceName, PcdTokenName) not in self._VpdInfo:
self._VpdInfo[(TokenSpaceName, PcdTokenName)] = []
self._VpdInfo[(TokenSpaceName, PcdTokenName)].append((SkuId, Offset, Value))
@@ -188,62 +188,62 @@ class VpdInfoFile:
if VpdObject.TokenSpaceGuidCName == TokenSpaceName and VpdObjectTokenCName == PcdTokenName.strip() and sku == SkuId:
if self._VpdArray[VpdObject][sku] == "*":
if Offset == "*":
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
self._VpdArray[VpdObject][sku] = Offset
Found = True
if not Found:
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Can not find PCD defined in VPD guid file.")

## Get count of VPD PCD collected from platform's autogen when building.
#
# @return The integer count value
# @return The integer count value
def GetCount(self):
Count = 0
for OffsetList in self._VpdArray.values():
Count += len(OffsetList)

return Count

## Get an offset value for a given VPD PCD
#
# Because BPDG only support one Sku, so only return offset for SKU default.
# Because BPDG only support one Sku, so only return offset for SKU default.
#
# @param vpd A given VPD PCD
# @param vpd A given VPD PCD
def GetOffset(self, vpd):
if vpd not in self._VpdArray:
return None

if len(self._VpdArray[vpd]) == 0:
return None

return self._VpdArray[vpd]
def GetVpdInfo(self, arg):
(PcdTokenName, TokenSpaceName) = arg
return self._VpdInfo.get((TokenSpaceName, PcdTokenName))

## Call external BPDG tool to process VPD file
#
#
# @param ToolPath The string path name for BPDG tool
# @param VpdFileName The string path name for VPD information guid.txt
#
#
def CallExtenalBPDGTool(ToolPath, VpdFileName):
assert ToolPath is not None, "Invalid parameter ToolPath"
assert VpdFileName is not None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"

OutputDir = os.path.dirname(VpdFileName)
FileName = os.path.basename(VpdFileName)
BaseName, ext = os.path.splitext(FileName)
OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)

try:
PopenObject = subprocess.Popen(' '.join([ToolPath,
'-o', OutputBinFileName,
'-o', OutputBinFileName,
'-m', OutputMapFileName,
'-q',
'-f',
VpdFileName]),
stdout=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr= subprocess.PIPE,
shell=True)
except Exception as X:
@@ -252,11 +252,11 @@ def CallExtenalBPDGTool(ToolPath, VpdFileName):
print(out)
while PopenObject.returncode is None :
PopenObject.wait()

if PopenObject.returncode != 0:
if PopenObject.returncode != 0:
EdkLogger.debug(EdkLogger.DEBUG_1, "Fail to call BPDG tool", str(error))
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, "Fail to execute BPDG tool with exit code: %d, the error message is: \n %s" % \
(PopenObject.returncode, str(error)))

return PopenObject.returncode